From d6a3bd871cf4b1c6356bcfa19bcd354ce3c523ac Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:39:21 -0700 Subject: [PATCH 001/611] Moving all datastore files into subdirectory. Done via: $ mkdir -p datastore/google/cloud $ cp google/__init__.py datastore/google/__init__.py $ git add datastore/google/__init__.py $ cp google/cloud/__init__.py datastore/google/cloud/__init__.py $ git add datastore/google/cloud/__init__.py $ git mv google/cloud/datastore datastore/google/cloud/datastore $ git mv unit_tests/datastore datastore/unit_tests --- .../google-cloud-datastore/google/__init__.py | 20 + .../google/cloud/__init__.py | 20 + .../google/cloud/datastore/__init__.py | 63 + .../cloud/datastore/_generated/__init__.py | 15 + .../datastore/_generated/_datastore.proto | 316 +++++ .../cloud/datastore/_generated/_entity.proto | 201 +++ .../cloud/datastore/_generated/_query.proto | 306 +++++ .../_generated/datastore_grpc_pb2.py | 301 +++++ .../datastore/_generated/datastore_pb2.py | 891 +++++++++++++ .../cloud/datastore/_generated/entity_pb2.py | 495 +++++++ .../cloud/datastore/_generated/query_pb2.py | 934 ++++++++++++++ .../google/cloud/datastore/batch.py | 322 +++++ .../google/cloud/datastore/client.py | 488 +++++++ .../google/cloud/datastore/connection.py | 672 ++++++++++ .../google/cloud/datastore/entity.py | 143 +++ .../google/cloud/datastore/helpers.py | 472 +++++++ .../google/cloud/datastore/key.py | 403 ++++++ .../google/cloud/datastore/query.py | 539 ++++++++ .../google/cloud/datastore/transaction.py | 170 +++ .../unit_tests/__init__.py | 13 + .../unit_tests/test_batch.py | 471 +++++++ .../unit_tests/test_client.py | 1058 +++++++++++++++ .../unit_tests/test_connection.py | 1140 +++++++++++++++++ .../unit_tests/test_entity.py | 211 +++ .../unit_tests/test_helpers.py | 926 +++++++++++++ .../unit_tests/test_key.py | 431 +++++++ .../unit_tests/test_query.py | 759 +++++++++++ .../unit_tests/test_transaction.py | 256 ++++ 28 files changed, 12036 insertions(+) create mode 100644 packages/google-cloud-datastore/google/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/batch.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/connection.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/entity.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/helpers.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/key.py create 
mode 100644 packages/google-cloud-datastore/google/cloud/datastore/query.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/transaction.py create mode 100644 packages/google-cloud-datastore/unit_tests/__init__.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_batch.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_client.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_connection.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_entity.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_helpers.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_key.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_query.py create mode 100644 packages/google-cloud-datastore/unit_tests/test_transaction.py diff --git a/packages/google-cloud-datastore/google/__init__.py b/packages/google-cloud-datastore/google/__init__.py new file mode 100644 index 000000000000..b2b833373882 --- /dev/null +++ b/packages/google-cloud-datastore/google/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-datastore/google/cloud/__init__.py b/packages/google-cloud-datastore/google/cloud/__init__.py new file mode 100644 index 000000000000..8ac7b74af136 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py new file mode 100644 index 000000000000..452597ffe7a1 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -0,0 +1,63 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shortcut methods for getting set up with Google Cloud Datastore. + +You'll typically use these to get started with the API:: + + >>> from google.cloud import datastore + >>> + >>> client = datastore.Client() + >>> key = client.key('EntityKind', 1234) + >>> entity = datastore.Entity(key) + >>> query = client.query(kind='EntityKind') + +The main concepts with this API are: + +- :class:`google.cloud.datastore.connection.Connection` + which represents a connection between your machine and the Cloud Datastore + API. + +- :class:`google.cloud.datastore.client.Client` + which represents a project (string) and namespace (string) bundled with + a connection and has convenience methods for constructing objects with that + project / namespace. + +- :class:`google.cloud.datastore.entity.Entity` + which represents a single entity in the datastore + (akin to a row in relational database world). + +- :class:`google.cloud.datastore.key.Key` + which represents a pointer to a particular entity in the datastore + (akin to a unique identifier in relational database world). + +- :class:`google.cloud.datastore.query.Query` + which represents a lookup or search over the rows in the datastore. + +- :class:`google.cloud.datastore.transaction.Transaction` + which represents an all-or-none transaction and enables consistency + when race conditions may occur. +""" + + +from google.cloud.datastore.batch import Batch +from google.cloud.datastore.connection import Connection +from google.cloud.datastore.client import Client +from google.cloud.datastore.entity import Entity +from google.cloud.datastore.key import Key +from google.cloud.datastore.query import Query +from google.cloud.datastore.transaction import Transaction + + +SCOPE = Connection.SCOPE diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py new file mode 100644 index 000000000000..5b2724764514 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generated protobuf modules for Google Cloud Datastore API.""" diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto new file mode 100644 index 000000000000..5881e9a14714 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto @@ -0,0 +1,316 @@ +// Copyright 2016 Google Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1; + +import "google/api/annotations.proto"; +import "google/datastore/v1/entity.proto"; +import "google/datastore/v1/query.proto"; + +option java_multiple_files = true; +option java_outer_classname = "DatastoreProto"; +option java_package = "com.google.datastore.v1"; + + +// Each RPC normalizes the partition IDs of the keys in its input entities, +// and always returns entities with keys with normalized partition IDs. +// This applies to all keys and entities, including those in values, except keys +// with both an empty path and an empty or unset partition ID. Normalization of +// input keys sets the project ID (if not already set) to the project ID from +// the request. +// +service Datastore { + // Looks up entities by key. + rpc Lookup(LookupRequest) returns (LookupResponse) { + option (google.api.http) = { post: "/v1/projects/{project_id}:lookup" body: "*" }; + } + + // Queries for entities. + rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) { + option (google.api.http) = { post: "/v1/projects/{project_id}:runQuery" body: "*" }; + } + + // Begins a new transaction. + rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { + option (google.api.http) = { post: "/v1/projects/{project_id}:beginTransaction" body: "*" }; + } + + // Commits a transaction, optionally creating, deleting or modifying some + // entities. + rpc Commit(CommitRequest) returns (CommitResponse) { + option (google.api.http) = { post: "/v1/projects/{project_id}:commit" body: "*" }; + } + + // Rolls back a transaction. + rpc Rollback(RollbackRequest) returns (RollbackResponse) { + option (google.api.http) = { post: "/v1/projects/{project_id}:rollback" body: "*" }; + } + + // Allocates IDs for the given keys, which is useful for referencing an entity + // before it is inserted. + rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) { + option (google.api.http) = { post: "/v1/projects/{project_id}:allocateIds" body: "*" }; + } +} + +// The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. +message LookupRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // The options for this lookup request. + ReadOptions read_options = 1; + + // Keys of entities to look up. + repeated Key keys = 3; +} + +// The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. +message LookupResponse { + // Entities found as `ResultType.FULL` entities. The order of results in this + // field is undefined and has no relation to the order of the keys in the + // input. + repeated EntityResult found = 1; + + // Entities not found as `ResultType.KEY_ONLY` entities. The order of results + // in this field is undefined and has no relation to the order of the keys + // in the input. + repeated EntityResult missing = 2; + + // A list of keys that were not looked up due to resource constraints. 
The + // order of results in this field is undefined and has no relation to the + // order of the keys in the input. + repeated Key deferred = 3; +} + +// The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +message RunQueryRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // Entities are partitioned into subsets, identified by a partition ID. + // Queries are scoped to a single partition. + // This partition ID is normalized with the standard default context + // partition ID. + PartitionId partition_id = 2; + + // The options for this query. + ReadOptions read_options = 1; + + // The type of query. + oneof query_type { + // The query to run. + Query query = 3; + + // The GQL query to run. + GqlQuery gql_query = 7; + } +} + +// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +message RunQueryResponse { + // A batch of query results (always present). + QueryResultBatch batch = 1; + + // The parsed form of the `GqlQuery` from the request, if it was set. + Query query = 2; +} + +// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +message BeginTransactionRequest { + // The ID of the project against which to make the request. + string project_id = 8; +} + +// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +message BeginTransactionResponse { + // The transaction identifier (always present). + bytes transaction = 1; +} + +// The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +message RollbackRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // The transaction identifier, returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + bytes transaction = 1; +} + +// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +// (an empty message). +message RollbackResponse { + +} + +// The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. +message CommitRequest { + // The modes available for commits. + enum Mode { + // Unspecified. This value must not be used. + MODE_UNSPECIFIED = 0; + + // Transactional: The mutations are either all applied, or none are applied. + // Learn about transactions [here](https://cloud.google.com/datastore/docs/concepts/transactions). + TRANSACTIONAL = 1; + + // Non-transactional: The mutations may not apply as all or none. + NON_TRANSACTIONAL = 2; + } + + // The ID of the project against which to make the request. + string project_id = 8; + + // The type of commit to perform. Defaults to `TRANSACTIONAL`. + Mode mode = 5; + + // Must be set when mode is `TRANSACTIONAL`. + oneof transaction_selector { + // The identifier of the transaction associated with the commit. A + // transaction identifier is returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + bytes transaction = 1; + } + + // The mutations to perform. + // + // When mode is `TRANSACTIONAL`, mutations affecting a single entity are + // applied in order. 
The following sequences of mutations affecting a single + // entity are not permitted in a single `Commit` request: + // + // - `insert` followed by `insert` + // - `update` followed by `insert` + // - `upsert` followed by `insert` + // - `delete` followed by `update` + // + // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single + // entity. + repeated Mutation mutations = 6; +} + +// The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. +message CommitResponse { + // The result of performing the mutations. + // The i-th mutation result corresponds to the i-th mutation in the request. + repeated MutationResult mutation_results = 3; + + // The number of index entries updated during the commit, or zero if none were + // updated. + int32 index_updates = 4; +} + +// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +message AllocateIdsRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // A list of keys with incomplete key paths for which to allocate IDs. + // No key may be reserved/read-only. + repeated Key keys = 1; +} + +// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +message AllocateIdsResponse { + // The keys specified in the request (in the same order), each with + // its key path completed with a newly allocated ID. + repeated Key keys = 1; +} + +// A mutation to apply to an entity. +message Mutation { + // The mutation operation. + // + // For `insert`, `update`, and `upsert`: + // - The entity's key must not be reserved/read-only. + // - No property in the entity may have a reserved name, + // not even a property in an entity in a value. + // - No value in the entity may have meaning 18, + // not even a value in an entity in another value. + oneof operation { + // The entity to insert. The entity must not already exist. + // The entity key's final path element may be incomplete. + Entity insert = 4; + + // The entity to update. The entity must already exist. + // Must have a complete key path. + Entity update = 5; + + // The entity to upsert. The entity may or may not already exist. + // The entity key's final path element may be incomplete. + Entity upsert = 6; + + // The key of the entity to delete. The entity may or may not already exist. + // Must have a complete key path and must not be reserved/read-only. + Key delete = 7; + } + + // When set, the server will detect whether or not this mutation conflicts + // with the current version of the entity on the server. Conflicting mutations + // are not applied, and are marked as such in MutationResult. + oneof conflict_detection_strategy { + // The version of the entity that this mutation is being applied to. If this + // does not match the current version on the server, the mutation conflicts. + int64 base_version = 8; + } +} + +// The result of applying a mutation. +message MutationResult { + // The automatically allocated key. + // Set only when the mutation allocated a key. + Key key = 3; + + // The version of the entity on the server after processing the mutation. If + // the mutation doesn't change anything on the server, then the version will + // be the version of the current entity or, if no entity is present, a version + // that is strictly greater than the version of any previous entity and less + // than the version of any possible future entity. + int64 version = 4; + + // Whether a conflict was detected for this mutation. 
Always false when a + // conflict detection strategy field is not set in the mutation. + bool conflict_detected = 5; +} + +// The options shared by read requests. +message ReadOptions { + // The possible values for read consistencies. + enum ReadConsistency { + // Unspecified. This value must not be used. + READ_CONSISTENCY_UNSPECIFIED = 0; + + // Strong consistency. + STRONG = 1; + + // Eventual consistency. + EVENTUAL = 2; + } + + // If not specified, lookups and ancestor queries default to + // `read_consistency`=`STRONG`, global queries default to + // `read_consistency`=`EVENTUAL`. + oneof consistency_type { + // The non-transactional read consistency to use. + // Cannot be set to `STRONG` for global queries. + ReadConsistency read_consistency = 1; + + // The identifier of the transaction in which to read. A + // transaction identifier is returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + bytes transaction = 2; + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto new file mode 100644 index 000000000000..a0e7d39138f2 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto @@ -0,0 +1,201 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1; + +import "google/api/annotations.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/type/latlng.proto"; + +option java_multiple_files = true; +option java_outer_classname = "EntityProto"; +option java_package = "com.google.datastore.v1"; + + +// A partition ID identifies a grouping of entities. The grouping is always +// by project and namespace, however the namespace ID may be empty. +// +// A partition ID contains several dimensions: +// project ID and namespace ID. +// +// Partition dimensions: +// +// - May be `""`. +// - Must be valid UTF-8 bytes. +// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` +// If the value of any dimension matches regex `__.*__`, the partition is +// reserved/read-only. +// A reserved/read-only partition ID is forbidden in certain documented +// contexts. +// +// Foreign partition IDs (in which the project ID does +// not match the context project ID ) are discouraged. +// Reads and writes of foreign partition IDs may fail if the project is not in an active state. +message PartitionId { + // The ID of the project to which the entities belong. + string project_id = 2; + + // If not empty, the ID of the namespace to which the entities belong. + string namespace_id = 4; +} + +// A unique identifier for an entity. +// If a key's partition ID or any of its path kinds or names are +// reserved/read-only, the key is reserved/read-only. +// A reserved/read-only key is forbidden in certain documented contexts. 
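
A minimal usage sketch for the service defined above, exercised through the generated modules this patch adds (datastore_grpc_pb2, datastore_pb2, entity_pb2). It assumes the Cloud Datastore emulator is listening on localhost:8081, and the project ID, kind, and property names are placeholders::

    import grpc

    from google.cloud.datastore._generated import datastore_grpc_pb2
    from google.cloud.datastore._generated import datastore_pb2
    from google.cloud.datastore._generated import entity_pb2

    # The emulator accepts plaintext connections; production traffic
    # would need an authenticated secure channel instead.
    channel = grpc.insecure_channel('localhost:8081')
    stub = datastore_grpc_pb2.DatastoreStub(channel)

    # A complete key: a partition (project) plus one path element.
    key = entity_pb2.Key(
        partition_id=entity_pb2.PartitionId(project_id='my-project'),
        path=[entity_pb2.Key.PathElement(kind='EntityKind', id=1234)],
    )

    # Upsert an entity non-transactionally, then read it back.
    entity = entity_pb2.Entity(key=key)
    entity.properties['done'].boolean_value = False
    commit = stub.Commit(datastore_pb2.CommitRequest(
        project_id='my-project',
        mode=datastore_pb2.CommitRequest.NON_TRANSACTIONAL,
        mutations=[datastore_pb2.Mutation(upsert=entity)],
    ))
    print(commit.mutation_results[0].version)

    response = stub.Lookup(datastore_pb2.LookupRequest(
        project_id='my-project',
        keys=[key],
    ))
    for result in response.found:
        print(result.entity, result.version)
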
+message Key { + // A (kind, ID/name) pair used to construct a key path. + // + // If either name or ID is set, the element is complete. + // If neither is set, the element is incomplete. + message PathElement { + // The kind of the entity. + // A kind matching regex `__.*__` is reserved/read-only. + // A kind must not contain more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + string kind = 1; + + // The type of ID. + oneof id_type { + // The auto-allocated ID of the entity. + // Never equal to zero. Values less than zero are discouraged and may not + // be supported in the future. + int64 id = 2; + + // The name of the entity. + // A name matching regex `__.*__` is reserved/read-only. + // A name must not be more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + string name = 3; + } + } + + // Entities are partitioned into subsets, currently identified by a project + // ID and namespace ID. + // Queries are scoped to a single partition. + PartitionId partition_id = 1; + + // The entity path. + // An entity path consists of one or more elements composed of a kind and a + // string or numerical identifier, which identify entities. The first + // element identifies a _root entity_, the second element identifies + // a _child_ of the root entity, the third element identifies a child of the + // second entity, and so forth. The entities identified by all prefixes of + // the path are called the element's _ancestors_. + // + // An entity path is always fully complete: *all* of the entity's ancestors + // are required to be in the path along with the entity identifier itself. + // The only exception is that in some documented cases, the identifier in the + // last path element (for the entity) itself may be omitted. For example, + // the last path element of the key of `Mutation.insert` may have no + // identifier. + // + // A path can never be empty, and a path can have at most 100 elements. + repeated PathElement path = 2; +} + +// An array value. +message ArrayValue { + // Values in the array. + // The order of this array may not be preserved if it contains a mix of + // indexed and unindexed values. + repeated Value values = 1; +} + +// A message that can hold any of the supported value types and associated +// metadata. +message Value { + // Must have a value set. + oneof value_type { + // A null value. + google.protobuf.NullValue null_value = 11; + + // A boolean value. + bool boolean_value = 1; + + // An integer value. + int64 integer_value = 2; + + // A double value. + double double_value = 3; + + // A timestamp value. + // When stored in the Datastore, precise only to microseconds; + // any additional precision is rounded down. + google.protobuf.Timestamp timestamp_value = 10; + + // A key value. + Key key_value = 5; + + // A UTF-8 encoded string value. + // When `exclude_from_indexes` is false (it is indexed) , may have at most 1500 bytes. + // Otherwise, may be set to at least 1,000,000 bytes. + string string_value = 17; + + // A blob value. + // May have at most 1,000,000 bytes. + // When `exclude_from_indexes` is false, may have at most 1500 bytes. + // In JSON requests, must be base64-encoded. + bytes blob_value = 18; + + // A geo point value representing a point on the surface of Earth. + google.type.LatLng geo_point_value = 8; + + // An entity value. + // + // - May have no key. + // - May have a key with an incomplete key path. + // - May have a reserved/read-only key. + Entity entity_value = 6; + + // An array value. 
+ // Cannot contain another array value. + // A `Value` instance that sets field `array_value` must not set fields + // `meaning` or `exclude_from_indexes`. + ArrayValue array_value = 9; + } + + // The `meaning` field should only be populated for backwards compatibility. + int32 meaning = 14; + + // If the value should be excluded from all indexes including those defined + // explicitly. + bool exclude_from_indexes = 19; +} + +// A Datastore data object. +// +// An entity is limited to 1 megabyte when stored. That _roughly_ +// corresponds to a limit of 1 megabyte for the serialized form of this +// message. +message Entity { + // The entity's key. + // + // An entity must have a key, unless otherwise documented (for example, + // an entity in `Value.entity_value` may have no key). + // An entity's kind is its key path's last element's kind, + // or null if it has no key. + Key key = 1; + + // The entity's properties. + // The map's keys are property names. + // A property name matching regex `__.*__` is reserved. + // A reserved property name is forbidden in certain documented contexts. + // The name must not contain more than 500 characters. + // The name cannot be `""`. + map properties = 3; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto new file mode 100644 index 000000000000..e6dba2b226f9 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto @@ -0,0 +1,306 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1; + +import "google/api/annotations.proto"; +import "google/datastore/v1/entity.proto"; +import "google/protobuf/wrappers.proto"; +import "google/type/latlng.proto"; + +option java_multiple_files = true; +option java_outer_classname = "QueryProto"; +option java_package = "com.google.datastore.v1"; + + +// The result of fetching an entity from Datastore. +message EntityResult { + // Specifies what data the 'entity' field contains. + // A `ResultType` is either implied (for example, in `LookupResponse.missing` + // from `datastore.proto`, it is always `KEY_ONLY`) or specified by context + // (for example, in message `QueryResultBatch`, field `entity_result_type` + // specifies a `ResultType` for all the values in field `entity_results`). + enum ResultType { + // Unspecified. This value is never used. + RESULT_TYPE_UNSPECIFIED = 0; + + // The key and properties. + FULL = 1; + + // A projected subset of properties. The entity may have no key. + PROJECTION = 2; + + // Only the key. + KEY_ONLY = 3; + } + + // The resulting entity. + Entity entity = 1; + + // The version of the entity, a strictly positive number that monotonically + // increases with changes to the entity. + // + // This field is set for [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity + // results. 
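
A minimal sketch of building the Entity message defined above with the generated entity_pb2 module; the names and sizes are placeholders. It illustrates the indexing rules from the Value comments: strings over 1500 bytes must set exclude_from_indexes, and array values carry those flags on the contained values rather than on the array itself::

    from google.cloud.datastore._generated import entity_pb2

    entity = entity_pb2.Entity()
    entity.key.partition_id.project_id = 'my-project'
    element = entity.key.path.add()
    element.kind = 'EntityKind'
    element.name = 'sample-name'

    # Short strings may stay indexed (at most 1500 bytes as UTF-8).
    entity.properties['title'].string_value = 'indexed title'

    # A large string must opt out of indexing.
    payload = entity.properties['payload']
    payload.string_value = 'x' * 10000
    payload.exclude_from_indexes = True

    # Per the comments above, the Value holding an array must not set
    # `meaning` or `exclude_from_indexes`; set them on the members.
    tags = entity.properties['tags'].array_value
    tags.values.add().string_value = 'first'
    tags.values.add().string_value = 'second'
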
+ // + // For [missing][google.datastore.v1.LookupResponse.missing] entities in `LookupResponse`, this + // is the version of the snapshot that was used to look up the entity, and it + // is always set except for eventually consistent reads. + int64 version = 4; + + // A cursor that points to the position after the result entity. + // Set only when the `EntityResult` is part of a `QueryResultBatch` message. + bytes cursor = 3; +} + +// A query for entities. +message Query { + // The projection to return. Defaults to returning all properties. + repeated Projection projection = 2; + + // The kinds to query (if empty, returns entities of all kinds). + // Currently at most 1 kind may be specified. + repeated KindExpression kind = 3; + + // The filter to apply. + Filter filter = 4; + + // The order to apply to the query results (if empty, order is unspecified). + repeated PropertyOrder order = 5; + + // The properties to make distinct. The query results will contain the first + // result for each distinct combination of values for the given properties + // (if empty, all results are returned). + repeated PropertyReference distinct_on = 6; + + // A starting point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to continue the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + bytes start_cursor = 7; + + // An ending point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to limit the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + bytes end_cursor = 8; + + // The number of results to skip. Applies before limit, but after all other + // constraints. Optional. Must be >= 0 if specified. + int32 offset = 10; + + // The maximum number of results to return. Applies after all other + // constraints. Optional. + // Unspecified is interpreted as no limit. + // Must be >= 0 if specified. + google.protobuf.Int32Value limit = 12; +} + +// A representation of a kind. +message KindExpression { + // The name of the kind. + string name = 1; +} + +// A reference to a property relative to the kind expressions. +message PropertyReference { + // The name of the property. + // If name includes "."s, it may be interpreted as a property name path. + string name = 2; +} + +// A representation of a property in a projection. +message Projection { + // The property to project. + PropertyReference property = 1; +} + +// The desired order for a specific property. +message PropertyOrder { + // The sort direction. + enum Direction { + // Unspecified. This value must not be used. + DIRECTION_UNSPECIFIED = 0; + + // Ascending. + ASCENDING = 1; + + // Descending. + DESCENDING = 2; + } + + // The property to order by. + PropertyReference property = 1; + + // The direction to order by. Defaults to `ASCENDING`. + Direction direction = 2; +} + +// A holder for any type of filter. +message Filter { + // The type of filter. + oneof filter_type { + // A composite filter. + CompositeFilter composite_filter = 1; + + // A filter on a property. + PropertyFilter property_filter = 2; + } +} + +// A filter that merges multiple other filters using the given operator. +message CompositeFilter { + // A composite filter operator. + enum Operator { + // Unspecified. This value must not be used. + OPERATOR_UNSPECIFIED = 0; + + // The results are required to satisfy each of the combined filters. 
+ AND = 1; + } + + // The operator for combining multiple filters. + Operator op = 1; + + // The list of filters to combine. + // Must contain at least one filter. + repeated Filter filters = 2; +} + +// A filter on a specific property. +message PropertyFilter { + // A property filter operator. + enum Operator { + // Unspecified. This value must not be used. + OPERATOR_UNSPECIFIED = 0; + + // Less than. + LESS_THAN = 1; + + // Less than or equal. + LESS_THAN_OR_EQUAL = 2; + + // Greater than. + GREATER_THAN = 3; + + // Greater than or equal. + GREATER_THAN_OR_EQUAL = 4; + + // Equal. + EQUAL = 5; + + // Has ancestor. + HAS_ANCESTOR = 11; + } + + // The property to filter by. + PropertyReference property = 1; + + // The operator to filter by. + Operator op = 2; + + // The value to compare the property to. + Value value = 3; +} + +// A [GQL query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). +message GqlQuery { + // A string of the format described + // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). + string query_string = 1; + + // When false, the query string must not contain any literals and instead must + // bind all values. For example, + // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while + // `SELECT * FROM Kind WHERE a = @value` is. + bool allow_literals = 2; + + // For each non-reserved named binding site in the query string, there must be + // a named parameter with that name, but not necessarily the inverse. + // + // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex + // `__.*__`, and must not be `""`. + map named_bindings = 5; + + // Numbered binding site @1 references the first numbered parameter, + // effectively using 1-based indexing, rather than the usual 0. + // + // For each binding site numbered i in `query_string`, there must be an i-th + // numbered parameter. The inverse must also be true. + repeated GqlQueryParameter positional_bindings = 4; +} + +// A binding parameter for a GQL query. +message GqlQueryParameter { + // The type of parameter. + oneof parameter_type { + // A value parameter. + Value value = 2; + + // A query cursor. Query cursors are returned in query + // result batches. + bytes cursor = 3; + } +} + +// A batch of results produced by a query. +message QueryResultBatch { + // The possible values for the `more_results` field. + enum MoreResultsType { + // Unspecified. This value is never used. + MORE_RESULTS_TYPE_UNSPECIFIED = 0; + + // There may be additional batches to fetch from this query. + NOT_FINISHED = 1; + + // The query is finished, but there may be more results after the limit. + MORE_RESULTS_AFTER_LIMIT = 2; + + // The query is finished, but there may be more results after the end + // cursor. + MORE_RESULTS_AFTER_CURSOR = 4; + + // The query has been exhausted. + NO_MORE_RESULTS = 3; + } + + // The number of results skipped, typically because of an offset. + int32 skipped_results = 6; + + // A cursor that points to the position after the last skipped result. + // Will be set when `skipped_results` != 0. + bytes skipped_cursor = 3; + + // The result type for every entity in `entity_results`. + EntityResult.ResultType entity_result_type = 1; + + // The results for this batch. + repeated EntityResult entity_results = 2; + + // A cursor that points to the position after the last result in the batch. + bytes end_cursor = 4; + + // The state of the query after the current batch. 
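
A minimal sketch composing the query messages above, assuming a `stub` built as in the earlier Lookup sketch and placeholder project, kind, and property names. It pages through results by re-running the query from `end_cursor` while `more_results` reports `NOT_FINISHED`::

    from google.cloud.datastore._generated import datastore_pb2
    from google.cloud.datastore._generated import entity_pb2
    from google.cloud.datastore._generated import query_pb2

    query = query_pb2.Query(
        kind=[query_pb2.KindExpression(name='EntityKind')],
        filter=query_pb2.Filter(property_filter=query_pb2.PropertyFilter(
            property=query_pb2.PropertyReference(name='done'),
            op=query_pb2.PropertyFilter.EQUAL,
            value=entity_pb2.Value(boolean_value=False),
        )),
    )

    while True:
        response = stub.RunQuery(datastore_pb2.RunQueryRequest(
            project_id='my-project',
            partition_id=entity_pb2.PartitionId(project_id='my-project'),
            query=query,
        ))
        batch = response.batch
        for result in batch.entity_results:
            print(result.entity)
        if batch.more_results != query_pb2.QueryResultBatch.NOT_FINISHED:
            break
        # Resume exactly where the last batch ended.
        query.start_cursor = batch.end_cursor
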
+ MoreResultsType more_results = 5; + + // The version number of the snapshot this batch was returned from. + // This applies to the range of results from the query's `start_cursor` (or + // the beginning of the query if no cursor was given) to this batch's + // `end_cursor` (not the query's `end_cursor`). + // + // In a single transaction, subsequent query result batches for the same query + // can have a greater snapshot version number. Each batch's snapshot version + // is valid for all preceding batches. + int64 snapshot_version = 7; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py new file mode 100644 index 000000000000..beea35710c6c --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py @@ -0,0 +1,301 @@ +# BEGIN: Imports from datastore_pb2 +from google.cloud.datastore._generated.datastore_pb2 import AllocateIdsRequest +from google.cloud.datastore._generated.datastore_pb2 import AllocateIdsResponse +from google.cloud.datastore._generated.datastore_pb2 import BeginTransactionRequest +from google.cloud.datastore._generated.datastore_pb2 import BeginTransactionResponse +from google.cloud.datastore._generated.datastore_pb2 import CommitRequest +from google.cloud.datastore._generated.datastore_pb2 import CommitResponse +from google.cloud.datastore._generated.datastore_pb2 import LookupRequest +from google.cloud.datastore._generated.datastore_pb2 import LookupResponse +from google.cloud.datastore._generated.datastore_pb2 import Mutation +from google.cloud.datastore._generated.datastore_pb2 import MutationResult +from google.cloud.datastore._generated.datastore_pb2 import ReadOptions +from google.cloud.datastore._generated.datastore_pb2 import RollbackRequest +from google.cloud.datastore._generated.datastore_pb2 import RollbackResponse +from google.cloud.datastore._generated.datastore_pb2 import RunQueryRequest +from google.cloud.datastore._generated.datastore_pb2 import RunQueryResponse +# END: Imports from datastore_pb2 +import grpc +from grpc.beta import implementations as beta_implementations +from grpc.beta import interfaces as beta_interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + + +class DatastoreStub(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Lookup = channel.unary_unary( + '/google.datastore.v1.Datastore/Lookup', + request_serializer=LookupRequest.SerializeToString, + response_deserializer=LookupResponse.FromString, + ) + self.RunQuery = channel.unary_unary( + '/google.datastore.v1.Datastore/RunQuery', + request_serializer=RunQueryRequest.SerializeToString, + response_deserializer=RunQueryResponse.FromString, + ) + self.BeginTransaction = channel.unary_unary( + '/google.datastore.v1.Datastore/BeginTransaction', + request_serializer=BeginTransactionRequest.SerializeToString, + response_deserializer=BeginTransactionResponse.FromString, + ) + self.Commit = channel.unary_unary( + '/google.datastore.v1.Datastore/Commit', + request_serializer=CommitRequest.SerializeToString, + response_deserializer=CommitResponse.FromString, + ) + self.Rollback = channel.unary_unary( + '/google.datastore.v1.Datastore/Rollback', + request_serializer=RollbackRequest.SerializeToString, + response_deserializer=RollbackResponse.FromString, + ) + self.AllocateIds = channel.unary_unary( + '/google.datastore.v1.Datastore/AllocateIds', + request_serializer=AllocateIdsRequest.SerializeToString, + response_deserializer=AllocateIdsResponse.FromString, + ) + + +class DatastoreServicer(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + + def Lookup(self, request, context): + """Looks up entities by key. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RunQuery(self, request, context): + """Queries for entities. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BeginTransaction(self, request, context): + """Begins a new transaction. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Commit(self, request, context): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Rollback(self, request, context): + """Rolls back a transaction. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AllocateIds(self, request, context): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DatastoreServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Lookup': grpc.unary_unary_rpc_method_handler( + servicer.Lookup, + request_deserializer=LookupRequest.FromString, + response_serializer=LookupResponse.SerializeToString, + ), + 'RunQuery': grpc.unary_unary_rpc_method_handler( + servicer.RunQuery, + request_deserializer=RunQueryRequest.FromString, + response_serializer=RunQueryResponse.SerializeToString, + ), + 'BeginTransaction': grpc.unary_unary_rpc_method_handler( + servicer.BeginTransaction, + request_deserializer=BeginTransactionRequest.FromString, + response_serializer=BeginTransactionResponse.SerializeToString, + ), + 'Commit': grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=CommitRequest.FromString, + response_serializer=CommitResponse.SerializeToString, + ), + 'Rollback': grpc.unary_unary_rpc_method_handler( + servicer.Rollback, + request_deserializer=RollbackRequest.FromString, + response_serializer=RollbackResponse.SerializeToString, + ), + 'AllocateIds': grpc.unary_unary_rpc_method_handler( + servicer.AllocateIds, + request_deserializer=AllocateIdsRequest.FromString, + response_serializer=AllocateIdsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.datastore.v1.Datastore', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class BetaDatastoreServicer(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + def Lookup(self, request, context): + """Looks up entities by key. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def RunQuery(self, request, context): + """Queries for entities. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def BeginTransaction(self, request, context): + """Begins a new transaction. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Commit(self, request, context): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Rollback(self, request, context): + """Rolls back a transaction. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AllocateIds(self, request, context): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + +class BetaDatastoreStub(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + def Lookup(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Looks up entities by key. 
+ """ + raise NotImplementedError() + Lookup.future = None + def RunQuery(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Queries for entities. + """ + raise NotImplementedError() + RunQuery.future = None + def BeginTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Begins a new transaction. + """ + raise NotImplementedError() + BeginTransaction.future = None + def Commit(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + raise NotImplementedError() + Commit.future = None + def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Rolls back a transaction. + """ + raise NotImplementedError() + Rollback.future = None + def AllocateIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + """ + raise NotImplementedError() + AllocateIds.future = None + + +def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + request_deserializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.FromString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.FromString, + ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.FromString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.FromString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.FromString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.FromString, + } + response_serializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.SerializeToString, + } + method_implementations = { + ('google.datastore.v1.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), + ('google.datastore.v1.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), + ('google.datastore.v1.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), + ('google.datastore.v1.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), + ('google.datastore.v1.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), + ('google.datastore.v1.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + +def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + request_serializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.SerializeToString, 
+ ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.SerializeToString, + } + response_deserializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.FromString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.FromString, + ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.FromString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.FromString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.FromString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.FromString, + } + cardinalities = { + 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, + 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, + 'Commit': cardinality.Cardinality.UNARY_UNARY, + 'Lookup': cardinality.Cardinality.UNARY_UNARY, + 'Rollback': cardinality.Cardinality.UNARY_UNARY, + 'RunQuery': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.datastore.v1.Datastore', cardinalities, options=stub_options) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py new file mode 100644 index 000000000000..f7a321a5c6b8 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py @@ -0,0 +1,891 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/datastore/v1/datastore.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1_dot_entity__pb2 +from google.cloud.datastore._generated import query_pb2 as google_dot_datastore_dot_v1_dot_query__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/datastore/v1/datastore.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n#google/datastore/v1/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a google/datastore/v1/entity.proto\x1a\x1fgoogle/datastore/v1/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"-\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 
\x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type2\xdb\x06\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*B+\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_entity__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_query__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_COMMITREQUEST_MODE = _descriptor.EnumDescriptor( + name='Mode', + full_name='google.datastore.v1.CommitRequest.Mode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MODE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TRANSACTIONAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NON_TRANSACTIONAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1178, + serialized_end=1248, +) +_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE) + +_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor( + name='ReadConsistency', + full_name='google.datastore.v1.ReadOptions.ReadConsistency', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='READ_CONSISTENCY_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STRONG', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EVENTUAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2007, + serialized_end=2084, +) +_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY) + + +_LOOKUPREQUEST = 
_descriptor.Descriptor( + name='LookupRequest', + full_name='google.datastore.v1.LookupRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.LookupRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='read_options', full_name='google.datastore.v1.LookupRequest.read_options', index=1, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='keys', full_name='google.datastore.v1.LookupRequest.keys', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=158, + serialized_end=289, +) + + +_LOOKUPRESPONSE = _descriptor.Descriptor( + name='LookupResponse', + full_name='google.datastore.v1.LookupResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='found', full_name='google.datastore.v1.LookupResponse.found', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='missing', full_name='google.datastore.v1.LookupResponse.missing', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deferred', full_name='google.datastore.v1.LookupResponse.deferred', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=292, + serialized_end=454, +) + + +_RUNQUERYREQUEST = _descriptor.Descriptor( + name='RunQueryRequest', + full_name='google.datastore.v1.RunQueryRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.RunQueryRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='partition_id', full_name='google.datastore.v1.RunQueryRequest.partition_id', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, 
+ is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='read_options', full_name='google.datastore.v1.RunQueryRequest.read_options', index=2, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='query', full_name='google.datastore.v1.RunQueryRequest.query', index=3, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gql_query', full_name='google.datastore.v1.RunQueryRequest.gql_query', index=4, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='query_type', full_name='google.datastore.v1.RunQueryRequest.query_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=457, + serialized_end=717, +) + + +_RUNQUERYRESPONSE = _descriptor.Descriptor( + name='RunQueryResponse', + full_name='google.datastore.v1.RunQueryResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='batch', full_name='google.datastore.v1.RunQueryResponse.batch', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='query', full_name='google.datastore.v1.RunQueryResponse.query', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=719, + serialized_end=834, +) + + +_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( + name='BeginTransactionRequest', + full_name='google.datastore.v1.BeginTransactionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.BeginTransactionRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=836, + serialized_end=881, +) + + +_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( + name='BeginTransactionResponse', + full_name='google.datastore.v1.BeginTransactionResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='transaction', 
full_name='google.datastore.v1.BeginTransactionResponse.transaction', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=883, + serialized_end=930, +) + + +_ROLLBACKREQUEST = _descriptor.Descriptor( + name='RollbackRequest', + full_name='google.datastore.v1.RollbackRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.RollbackRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1.RollbackRequest.transaction', index=1, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=932, + serialized_end=990, +) + + +_ROLLBACKRESPONSE = _descriptor.Descriptor( + name='RollbackResponse', + full_name='google.datastore.v1.RollbackResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=992, + serialized_end=1010, +) + + +_COMMITREQUEST = _descriptor.Descriptor( + name='CommitRequest', + full_name='google.datastore.v1.CommitRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.CommitRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mode', full_name='google.datastore.v1.CommitRequest.mode', index=1, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1.CommitRequest.transaction', index=2, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mutations', full_name='google.datastore.v1.CommitRequest.mutations', index=3, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + _COMMITREQUEST_MODE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='transaction_selector', full_name='google.datastore.v1.CommitRequest.transaction_selector', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1013, + serialized_end=1272, +) + + +_COMMITRESPONSE = _descriptor.Descriptor( + name='CommitResponse', + full_name='google.datastore.v1.CommitResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mutation_results', full_name='google.datastore.v1.CommitResponse.mutation_results', index=0, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='index_updates', full_name='google.datastore.v1.CommitResponse.index_updates', index=1, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1274, + serialized_end=1376, +) + + +_ALLOCATEIDSREQUEST = _descriptor.Descriptor( + name='AllocateIdsRequest', + full_name='google.datastore.v1.AllocateIdsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.AllocateIdsRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='keys', full_name='google.datastore.v1.AllocateIdsRequest.keys', index=1, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1378, + serialized_end=1458, +) + + +_ALLOCATEIDSRESPONSE = _descriptor.Descriptor( + name='AllocateIdsResponse', + full_name='google.datastore.v1.AllocateIdsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='google.datastore.v1.AllocateIdsResponse.keys', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1460, + serialized_end=1521, +) + + +_MUTATION = _descriptor.Descriptor( + name='Mutation', + full_name='google.datastore.v1.Mutation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='insert', full_name='google.datastore.v1.Mutation.insert', index=0, + 
number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update', full_name='google.datastore.v1.Mutation.update', index=1, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='upsert', full_name='google.datastore.v1.Mutation.upsert', index=2, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete', full_name='google.datastore.v1.Mutation.delete', index=3, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='base_version', full_name='google.datastore.v1.Mutation.base_version', index=4, + number=8, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='operation', full_name='google.datastore.v1.Mutation.operation', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='conflict_detection_strategy', full_name='google.datastore.v1.Mutation.conflict_detection_strategy', + index=1, containing_type=None, fields=[]), + ], + serialized_start=1524, + serialized_end=1787, +) + + +_MUTATIONRESULT = _descriptor.Descriptor( + name='MutationResult', + full_name='google.datastore.v1.MutationResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1.MutationResult.key', index=0, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='google.datastore.v1.MutationResult.version', index=1, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='conflict_detected', full_name='google.datastore.v1.MutationResult.conflict_detected', index=2, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1789, + serialized_end=1888, +) + + +_READOPTIONS = _descriptor.Descriptor( + name='ReadOptions', + full_name='google.datastore.v1.ReadOptions', + filename=None, + file=DESCRIPTOR, 
+ containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='read_consistency', full_name='google.datastore.v1.ReadOptions.read_consistency', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1.ReadOptions.transaction', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _READOPTIONS_READCONSISTENCY, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='consistency_type', full_name='google.datastore.v1.ReadOptions.consistency_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1891, + serialized_end=2104, +) + +_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS +_LOOKUPREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY +_LOOKUPRESPONSE.fields_by_name['found'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._ENTITYRESULT +_LOOKUPRESPONSE.fields_by_name['missing'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._ENTITYRESULT +_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY +_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._PARTITIONID +_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS +_RUNQUERYREQUEST.fields_by_name['query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERY +_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._GQLQUERY +_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( + _RUNQUERYREQUEST.fields_by_name['query']) +_RUNQUERYREQUEST.fields_by_name['query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] +_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( + _RUNQUERYREQUEST.fields_by_name['gql_query']) +_RUNQUERYREQUEST.fields_by_name['gql_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] +_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERYRESULTBATCH +_RUNQUERYRESPONSE.fields_by_name['query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERY +_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE +_COMMITREQUEST.fields_by_name['mutations'].message_type = _MUTATION +_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST +_COMMITREQUEST.oneofs_by_name['transaction_selector'].fields.append( + _COMMITREQUEST.fields_by_name['transaction']) +_COMMITREQUEST.fields_by_name['transaction'].containing_oneof = _COMMITREQUEST.oneofs_by_name['transaction_selector'] +_COMMITRESPONSE.fields_by_name['mutation_results'].message_type = _MUTATIONRESULT +_ALLOCATEIDSREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY +_ALLOCATEIDSRESPONSE.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY +_MUTATION.fields_by_name['insert'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY 
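+# NOTE (not protoc output; an illustrative sketch): the assignments in this
+# section back-fill the message_type/enum_type slots that were left as None in
+# the FieldDescriptor constructors above -- cross-message references can only
+# be resolved once every Descriptor object exists. After import, the wired-up
+# classes behave like ordinary protobuf messages; a minimal sketch, assuming a
+# hypothetical project id:
+#
+#   req = CommitRequest(project_id='example-project')   # hypothetical id
+#   mut = req.mutations.add()                  # repeated Mutation field
+#   mut.insert.key.partition_id.project_id = 'example-project'
+#   assert req.WhichOneof('transaction_selector') is None   # oneof unset
+#
+# Assigning req.transaction instead would select that arm of the oneof.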
+_MUTATION.fields_by_name['update'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY +_MUTATION.fields_by_name['upsert'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY +_MUTATION.fields_by_name['delete'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['insert']) +_MUTATION.fields_by_name['insert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['update']) +_MUTATION.fields_by_name['update'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['upsert']) +_MUTATION.fields_by_name['upsert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['delete']) +_MUTATION.fields_by_name['delete'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['conflict_detection_strategy'].fields.append( + _MUTATION.fields_by_name['base_version']) +_MUTATION.fields_by_name['base_version'].containing_oneof = _MUTATION.oneofs_by_name['conflict_detection_strategy'] +_MUTATIONRESULT.fields_by_name['key'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY +_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY +_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS +_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( + _READOPTIONS.fields_by_name['read_consistency']) +_READOPTIONS.fields_by_name['read_consistency'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] +_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( + _READOPTIONS.fields_by_name['transaction']) +_READOPTIONS.fields_by_name['transaction'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] +DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST +DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE +DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST +DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE +DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST +DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE +DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST +DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE +DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST +DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE +DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST +DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE +DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION +DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT +DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS + +LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict( + DESCRIPTOR = _LOOKUPREQUEST, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.LookupRequest) + )) +_sym_db.RegisterMessage(LookupRequest) + +LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict( + DESCRIPTOR = _LOOKUPRESPONSE, + __module__ = 
'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.LookupResponse) + )) +_sym_db.RegisterMessage(LookupResponse) + +RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict( + DESCRIPTOR = _RUNQUERYREQUEST, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.RunQueryRequest) + )) +_sym_db.RegisterMessage(RunQueryRequest) + +RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict( + DESCRIPTOR = _RUNQUERYRESPONSE, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.RunQueryResponse) + )) +_sym_db.RegisterMessage(RunQueryResponse) + +BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict( + DESCRIPTOR = _BEGINTRANSACTIONREQUEST, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.BeginTransactionRequest) + )) +_sym_db.RegisterMessage(BeginTransactionRequest) + +BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict( + DESCRIPTOR = _BEGINTRANSACTIONRESPONSE, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.BeginTransactionResponse) + )) +_sym_db.RegisterMessage(BeginTransactionResponse) + +RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict( + DESCRIPTOR = _ROLLBACKREQUEST, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.RollbackRequest) + )) +_sym_db.RegisterMessage(RollbackRequest) + +RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict( + DESCRIPTOR = _ROLLBACKRESPONSE, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.RollbackResponse) + )) +_sym_db.RegisterMessage(RollbackResponse) + +CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict( + DESCRIPTOR = _COMMITREQUEST, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.CommitRequest) + )) +_sym_db.RegisterMessage(CommitRequest) + +CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict( + DESCRIPTOR = _COMMITRESPONSE, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.CommitResponse) + )) +_sym_db.RegisterMessage(CommitResponse) + +AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict( + DESCRIPTOR = _ALLOCATEIDSREQUEST, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.AllocateIdsRequest) + )) +_sym_db.RegisterMessage(AllocateIdsRequest) + +AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict( + DESCRIPTOR = _ALLOCATEIDSRESPONSE, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.AllocateIdsResponse) + )) +_sym_db.RegisterMessage(AllocateIdsResponse) + +Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict( + DESCRIPTOR = 
_MUTATION, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Mutation) + )) +_sym_db.RegisterMessage(Mutation) + +MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict( + DESCRIPTOR = _MUTATIONRESULT, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.MutationResult) + )) +_sym_db.RegisterMessage(MutationResult) + +ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict( + DESCRIPTOR = _READOPTIONS, + __module__ = 'google.datastore.v1.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.ReadOptions) + )) +_sym_db.RegisterMessage(ReadOptions) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\016DatastoreProtoP\001')) +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py new file mode 100644 index 000000000000..44d530bdb74a --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py @@ -0,0 +1,495 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/datastore/v1/entity.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/datastore/v1/entity.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n google/datastore/v1/entity.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xb7\x01\n\x03Key\x12\x36\n\x0cpartition_id\x18\x01 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x32\n\x04path\x18\x02 \x03(\x0b\x32$.google.datastore.v1.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"8\n\nArrayValue\x12*\n\x06values\x18\x01 \x03(\x0b\x32\x1a.google.datastore.v1.Value\"\xf1\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\tkey_value\x18\x05 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 
\x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x33\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12\x36\n\x0b\x61rray_value\x18\t \x01(\x0b\x32\x1f.google.datastore.v1.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xbf\x01\n\x06\x45ntity\x12%\n\x03key\x18\x01 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12?\n\nproperties\x18\x03 \x03(\x0b\x32+.google.datastore.v1.Entity.PropertiesEntry\x1aM\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Value:\x02\x38\x01\x42(\n\x17\x63om.google.datastore.v1B\x0b\x45ntityProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_PARTITIONID = _descriptor.Descriptor( + name='PartitionId', + full_name='google.datastore.v1.PartitionId', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1.PartitionId.project_id', index=0, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='namespace_id', full_name='google.datastore.v1.PartitionId.namespace_id', index=1, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=176, + serialized_end=231, +) + + +_KEY_PATHELEMENT = _descriptor.Descriptor( + name='PathElement', + full_name='google.datastore.v1.Key.PathElement', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='kind', full_name='google.datastore.v1.Key.PathElement.kind', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='id', full_name='google.datastore.v1.Key.PathElement.id', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='google.datastore.v1.Key.PathElement.name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='id_type', 
full_name='google.datastore.v1.Key.PathElement.id_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=349, + serialized_end=417, +) + +_KEY = _descriptor.Descriptor( + name='Key', + full_name='google.datastore.v1.Key', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='partition_id', full_name='google.datastore.v1.Key.partition_id', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='path', full_name='google.datastore.v1.Key.path', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_KEY_PATHELEMENT, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=234, + serialized_end=417, +) + + +_ARRAYVALUE = _descriptor.Descriptor( + name='ArrayValue', + full_name='google.datastore.v1.ArrayValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='values', full_name='google.datastore.v1.ArrayValue.values', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=419, + serialized_end=475, +) + + +_VALUE = _descriptor.Descriptor( + name='Value', + full_name='google.datastore.v1.Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='null_value', full_name='google.datastore.v1.Value.null_value', index=0, + number=11, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='boolean_value', full_name='google.datastore.v1.Value.boolean_value', index=1, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='integer_value', full_name='google.datastore.v1.Value.integer_value', index=2, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.datastore.v1.Value.double_value', index=3, + number=3, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_value', full_name='google.datastore.v1.Value.timestamp_value', index=4, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key_value', full_name='google.datastore.v1.Value.key_value', index=5, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.datastore.v1.Value.string_value', index=6, + number=17, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='blob_value', full_name='google.datastore.v1.Value.blob_value', index=7, + number=18, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='geo_point_value', full_name='google.datastore.v1.Value.geo_point_value', index=8, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entity_value', full_name='google.datastore.v1.Value.entity_value', index=9, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='array_value', full_name='google.datastore.v1.Value.array_value', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='meaning', full_name='google.datastore.v1.Value.meaning', index=11, + number=14, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='exclude_from_indexes', full_name='google.datastore.v1.Value.exclude_from_indexes', index=12, + number=19, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value_type', full_name='google.datastore.v1.Value.value_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=478, + serialized_end=975, +) + + +_ENTITY_PROPERTIESENTRY = _descriptor.Descriptor( + name='PropertiesEntry', + full_name='google.datastore.v1.Entity.PropertiesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1.Entity.PropertiesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1.Entity.PropertiesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1092, + serialized_end=1169, +) + +_ENTITY = _descriptor.Descriptor( + name='Entity', + full_name='google.datastore.v1.Entity', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1.Entity.key', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='properties', full_name='google.datastore.v1.Entity.properties', index=1, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ENTITY_PROPERTIESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=978, + serialized_end=1169, +) + +_KEY_PATHELEMENT.containing_type = _KEY +_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( + _KEY_PATHELEMENT.fields_by_name['id']) +_KEY_PATHELEMENT.fields_by_name['id'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] +_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( + _KEY_PATHELEMENT.fields_by_name['name']) +_KEY_PATHELEMENT.fields_by_name['name'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] +_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID +_KEY.fields_by_name['path'].message_type = _KEY_PATHELEMENT +_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE +_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE +_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_VALUE.fields_by_name['key_value'].message_type = _KEY +_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG +_VALUE.fields_by_name['entity_value'].message_type = _ENTITY +_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['null_value']) +_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['boolean_value']) +_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['integer_value']) +_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['double_value']) 
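+# NOTE (not protoc output; an illustrative sketch): each oneof member is wired
+# in two steps -- the field is appended to the OneofDescriptor's field list,
+# then given a containing_oneof back-reference -- which is what makes
+# WhichOneof() and last-writer-wins semantics work on the generated Value
+# class. A minimal sketch with illustrative values:
+#
+#   v = Value(string_value='hello')     # selects the value_type oneof
+#   v.integer_value = 42                # implicitly clears string_value
+#   assert v.WhichOneof('value_type') == 'integer_value'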
+_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['timestamp_value']) +_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['key_value']) +_VALUE.fields_by_name['key_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['string_value']) +_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['blob_value']) +_VALUE.fields_by_name['blob_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['geo_point_value']) +_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['entity_value']) +_VALUE.fields_by_name['entity_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['array_value']) +_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_ENTITY_PROPERTIESENTRY.fields_by_name['value'].message_type = _VALUE +_ENTITY_PROPERTIESENTRY.containing_type = _ENTITY +_ENTITY.fields_by_name['key'].message_type = _KEY +_ENTITY.fields_by_name['properties'].message_type = _ENTITY_PROPERTIESENTRY +DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID +DESCRIPTOR.message_types_by_name['Key'] = _KEY +DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE +DESCRIPTOR.message_types_by_name['Value'] = _VALUE +DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY + +PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict( + DESCRIPTOR = _PARTITIONID, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.PartitionId) + )) +_sym_db.RegisterMessage(PartitionId) + +Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict( + + PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict( + DESCRIPTOR = _KEY_PATHELEMENT, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Key.PathElement) + )) + , + DESCRIPTOR = _KEY, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Key) + )) +_sym_db.RegisterMessage(Key) +_sym_db.RegisterMessage(Key.PathElement) + +ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict( + DESCRIPTOR = _ARRAYVALUE, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.ArrayValue) + )) +_sym_db.RegisterMessage(ArrayValue) + +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( + DESCRIPTOR = _VALUE, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Value) + )) +_sym_db.RegisterMessage(Value) + +Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( + + PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict( + 
DESCRIPTOR = _ENTITY_PROPERTIESENTRY, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Entity.PropertiesEntry) + )) + , + DESCRIPTOR = _ENTITY, + __module__ = 'google.datastore.v1.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Entity) + )) +_sym_db.RegisterMessage(Entity) +_sym_db.RegisterMessage(Entity.PropertiesEntry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\013EntityProtoP\001')) +_ENTITY_PROPERTIESENTRY.has_options = True +_ENTITY_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py new file mode 100644 index 000000000000..7569f225d53a --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py @@ -0,0 +1,934 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/datastore/v1/query.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1_dot_entity__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/datastore/v1/query.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n\x1fgoogle/datastore/v1/query.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a google/datastore/v1/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x18google/type/latlng.proto\"\xaf\x01\n\x0c\x45ntityResult\x12+\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x1b.google.datastore.v1.Entity\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"Q\n\nResultType\x12\x1b\n\x17RESULT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xf2\x02\n\x05Query\x12\x33\n\nprojection\x18\x02 \x03(\x0b\x32\x1f.google.datastore.v1.Projection\x12\x31\n\x04kind\x18\x03 \x03(\x0b\x32#.google.datastore.v1.KindExpression\x12+\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.Filter\x12\x31\n\x05order\x18\x05 \x03(\x0b\x32\".google.datastore.v1.PropertyOrder\x12;\n\x0b\x64istinct_on\x18\x06 \x03(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x0e\n\x06offset\x18\n \x01(\x05\x12*\n\x05limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x01(\t\"F\n\nProjection\x12\x38\n\x08property\x18\x01 
\x01(\x0b\x32&.google.datastore.v1.PropertyReference\"\xd1\x01\n\rPropertyOrder\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12?\n\tdirection\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyOrder.Direction\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x99\x01\n\x06\x46ilter\x12@\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32$.google.datastore.v1.CompositeFilterH\x00\x12>\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\'\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_entity__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor( + name='ResultType', + full_name='google.datastore.v1.EntityResult.ResultType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RESULT_TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FULL', index=1, number=1, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='PROJECTION', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEY_ONLY', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=273, + serialized_end=354, +) +_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE) + +_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor( + name='Direction', + full_name='google.datastore.v1.PropertyOrder.Direction', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DIRECTION_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ASCENDING', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DESCENDING', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1009, + serialized_end=1078, +) +_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION) + +_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor( + name='Operator', + full_name='google.datastore.v1.CompositeFilter.Operator', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OPERATOR_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AND', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1361, + serialized_end=1406, +) +_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR) + +_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor( + name='Operator', + full_name='google.datastore.v1.PropertyFilter.Operator', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OPERATOR_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LESS_THAN', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LESS_THAN_OR_EQUAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GREATER_THAN', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GREATER_THAN_OR_EQUAL', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EQUAL', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HAS_ANCESTOR', index=6, number=11, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1587, + serialized_end=1736, +) +_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR) + +_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor( + name='MoreResultsType', + full_name='google.datastore.v1.QueryResultBatch.MoreResultsType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MORE_RESULTS_TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOT_FINISHED', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MORE_RESULTS_AFTER_LIMIT', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MORE_RESULTS_AFTER_CURSOR', index=3, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NO_MORE_RESULTS', index=4, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2463, + serialized_end=2615, 
+) +_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE) + + +_ENTITYRESULT = _descriptor.Descriptor( + name='EntityResult', + full_name='google.datastore.v1.EntityResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entity', full_name='google.datastore.v1.EntityResult.entity', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='google.datastore.v1.EntityResult.version', index=1, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cursor', full_name='google.datastore.v1.EntityResult.cursor', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ENTITYRESULT_RESULTTYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=179, + serialized_end=354, +) + + +_QUERY = _descriptor.Descriptor( + name='Query', + full_name='google.datastore.v1.Query', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='projection', full_name='google.datastore.v1.Query.projection', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='kind', full_name='google.datastore.v1.Query.kind', index=1, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.datastore.v1.Query.filter', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='order', full_name='google.datastore.v1.Query.order', index=3, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='distinct_on', full_name='google.datastore.v1.Query.distinct_on', index=4, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_cursor', full_name='google.datastore.v1.Query.start_cursor', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='end_cursor', full_name='google.datastore.v1.Query.end_cursor', index=6, + number=8, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='offset', full_name='google.datastore.v1.Query.offset', index=7, + number=10, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='limit', full_name='google.datastore.v1.Query.limit', index=8, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=357, + serialized_end=727, +) + + +_KINDEXPRESSION = _descriptor.Descriptor( + name='KindExpression', + full_name='google.datastore.v1.KindExpression', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.datastore.v1.KindExpression.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=729, + serialized_end=759, +) + + +_PROPERTYREFERENCE = _descriptor.Descriptor( + name='PropertyReference', + full_name='google.datastore.v1.PropertyReference', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.datastore.v1.PropertyReference.name', index=0, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=761, + serialized_end=794, +) + + +_PROJECTION = _descriptor.Descriptor( + name='Projection', + full_name='google.datastore.v1.Projection', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='property', full_name='google.datastore.v1.Projection.property', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=796, + serialized_end=866, +) + + +_PROPERTYORDER = _descriptor.Descriptor( + name='PropertyOrder', + full_name='google.datastore.v1.PropertyOrder', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name='property', full_name='google.datastore.v1.PropertyOrder.property', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='direction', full_name='google.datastore.v1.PropertyOrder.direction', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PROPERTYORDER_DIRECTION, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=869, + serialized_end=1078, +) + + +_FILTER = _descriptor.Descriptor( + name='Filter', + full_name='google.datastore.v1.Filter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='composite_filter', full_name='google.datastore.v1.Filter.composite_filter', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='property_filter', full_name='google.datastore.v1.Filter.property_filter', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='filter_type', full_name='google.datastore.v1.Filter.filter_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1081, + serialized_end=1234, +) + + +_COMPOSITEFILTER = _descriptor.Descriptor( + name='CompositeFilter', + full_name='google.datastore.v1.CompositeFilter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='op', full_name='google.datastore.v1.CompositeFilter.op', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filters', full_name='google.datastore.v1.CompositeFilter.filters', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _COMPOSITEFILTER_OPERATOR, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1237, + serialized_end=1406, +) + + +_PROPERTYFILTER = _descriptor.Descriptor( + name='PropertyFilter', + full_name='google.datastore.v1.PropertyFilter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='property', full_name='google.datastore.v1.PropertyFilter.property', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='op', full_name='google.datastore.v1.PropertyFilter.op', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1.PropertyFilter.value', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PROPERTYFILTER_OPERATOR, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1409, + serialized_end=1736, +) + + +_GQLQUERY_NAMEDBINDINGSENTRY = _descriptor.Descriptor( + name='NamedBindingsEntry', + full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1940, + serialized_end=2032, +) + +_GQLQUERY = _descriptor.Descriptor( + name='GqlQuery', + full_name='google.datastore.v1.GqlQuery', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='query_string', full_name='google.datastore.v1.GqlQuery.query_string', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='allow_literals', full_name='google.datastore.v1.GqlQuery.allow_literals', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='named_bindings', full_name='google.datastore.v1.GqlQuery.named_bindings', index=2, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='positional_bindings', full_name='google.datastore.v1.GqlQuery.positional_bindings', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_GQLQUERY_NAMEDBINDINGSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1739, + serialized_end=2032, +) + + +_GQLQUERYPARAMETER = _descriptor.Descriptor( + name='GqlQueryParameter', + full_name='google.datastore.v1.GqlQueryParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1.GqlQueryParameter.value', index=0, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cursor', full_name='google.datastore.v1.GqlQueryParameter.cursor', index=1, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='parameter_type', full_name='google.datastore.v1.GqlQueryParameter.parameter_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2034, + serialized_end=2134, +) + + +_QUERYRESULTBATCH = _descriptor.Descriptor( + name='QueryResultBatch', + full_name='google.datastore.v1.QueryResultBatch', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='skipped_results', full_name='google.datastore.v1.QueryResultBatch.skipped_results', index=0, + number=6, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='skipped_cursor', full_name='google.datastore.v1.QueryResultBatch.skipped_cursor', index=1, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entity_result_type', full_name='google.datastore.v1.QueryResultBatch.entity_result_type', index=2, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entity_results', full_name='google.datastore.v1.QueryResultBatch.entity_results', index=3, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_cursor', full_name='google.datastore.v1.QueryResultBatch.end_cursor', index=4, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='more_results', 
full_name='google.datastore.v1.QueryResultBatch.more_results', index=5, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='snapshot_version', full_name='google.datastore.v1.QueryResultBatch.snapshot_version', index=6, + number=7, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _QUERYRESULTBATCH_MORERESULTSTYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2137, + serialized_end=2615, +) + +_ENTITYRESULT.fields_by_name['entity'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY +_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT +_QUERY.fields_by_name['projection'].message_type = _PROJECTION +_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION +_QUERY.fields_by_name['filter'].message_type = _FILTER +_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER +_QUERY.fields_by_name['distinct_on'].message_type = _PROPERTYREFERENCE +_QUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE +_PROJECTION.fields_by_name['property'].message_type = _PROPERTYREFERENCE +_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE +_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION +_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER +_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER +_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER +_FILTER.oneofs_by_name['filter_type'].fields.append( + _FILTER.fields_by_name['composite_filter']) +_FILTER.fields_by_name['composite_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] +_FILTER.oneofs_by_name['filter_type'].fields.append( + _FILTER.fields_by_name['property_filter']) +_FILTER.fields_by_name['property_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] +_COMPOSITEFILTER.fields_by_name['op'].enum_type = _COMPOSITEFILTER_OPERATOR +_COMPOSITEFILTER.fields_by_name['filters'].message_type = _FILTER +_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER +_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE +_PROPERTYFILTER.fields_by_name['op'].enum_type = _PROPERTYFILTER_OPERATOR +_PROPERTYFILTER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._VALUE +_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER +_GQLQUERY_NAMEDBINDINGSENTRY.fields_by_name['value'].message_type = _GQLQUERYPARAMETER +_GQLQUERY_NAMEDBINDINGSENTRY.containing_type = _GQLQUERY +_GQLQUERY.fields_by_name['named_bindings'].message_type = _GQLQUERY_NAMEDBINDINGSENTRY +_GQLQUERY.fields_by_name['positional_bindings'].message_type = _GQLQUERYPARAMETER +_GQLQUERYPARAMETER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._VALUE +_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( + _GQLQUERYPARAMETER.fields_by_name['value']) +_GQLQUERYPARAMETER.fields_by_name['value'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] +_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( + 
_GQLQUERYPARAMETER.fields_by_name['cursor']) +_GQLQUERYPARAMETER.fields_by_name['cursor'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] +_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE +_QUERYRESULTBATCH.fields_by_name['entity_results'].message_type = _ENTITYRESULT +_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE +_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH +DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT +DESCRIPTOR.message_types_by_name['Query'] = _QUERY +DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION +DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE +DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION +DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER +DESCRIPTOR.message_types_by_name['Filter'] = _FILTER +DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER +DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER +DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY +DESCRIPTOR.message_types_by_name['GqlQueryParameter'] = _GQLQUERYPARAMETER +DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH + +EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict( + DESCRIPTOR = _ENTITYRESULT, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.EntityResult) + )) +_sym_db.RegisterMessage(EntityResult) + +Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict( + DESCRIPTOR = _QUERY, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) + )) +_sym_db.RegisterMessage(Query) + +KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( + DESCRIPTOR = _KINDEXPRESSION, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) + )) +_sym_db.RegisterMessage(KindExpression) + +PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYREFERENCE, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) + )) +_sym_db.RegisterMessage(PropertyReference) + +Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( + DESCRIPTOR = _PROJECTION, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) + )) +_sym_db.RegisterMessage(Projection) + +PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYORDER, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) + )) +_sym_db.RegisterMessage(PropertyOrder) + +Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( + DESCRIPTOR = _FILTER, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) + )) +_sym_db.RegisterMessage(Filter) + +CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( + DESCRIPTOR = _COMPOSITEFILTER, + __module__ = 'google.datastore.v1.query_pb2' + # 
@@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) + )) +_sym_db.RegisterMessage(CompositeFilter) + +PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYFILTER, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) + )) +_sym_db.RegisterMessage(PropertyFilter) + +GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( + + NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) + )) + , + DESCRIPTOR = _GQLQUERY, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) + )) +_sym_db.RegisterMessage(GqlQuery) +_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) + +GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERYPARAMETER, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) + )) +_sym_db.RegisterMessage(GqlQueryParameter) + +QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( + DESCRIPTOR = _QUERYRESULTBATCH, + __module__ = 'google.datastore.v1.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) + )) +_sym_db.RegisterMessage(QueryResultBatch) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\nQueryProtoP\001')) +_GQLQUERY_NAMEDBINDINGSENTRY.has_options = True +_GQLQUERY_NAMEDBINDINGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py new file mode 100644 index 000000000000..f27d67255525 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -0,0 +1,322 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with a batch of updates / deletes. + +Batches provide the ability to execute multiple operations +in a single request to the Cloud Datastore API. + +See +https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations +""" + +from google.cloud.datastore import helpers +from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 + + +class Batch(object): + """An abstraction representing a collected group of updates / deletes. + + Used to build up a bulk mutation. 
+ + For example, the following snippet of code will put the two ``save`` + operations and the ``delete`` operation into the same mutation, and send + them to the server in a single API request:: + + >>> from google.cloud import datastore + >>> client = datastore.Client() + >>> batch = client.batch() + >>> batch.put(entity1) + >>> batch.put(entity2) + >>> batch.delete(key3) + >>> batch.commit() + + You can also use a batch as a context manager, in which case + :meth:`commit` will be called automatically if its block exits without + raising an exception:: + + >>> with batch: + ... batch.put(entity1) + ... batch.put(entity2) + ... batch.delete(key3) + + By default, no updates will be sent if the block exits with an error:: + + >>> with batch: + ... do_some_work(batch) + ... raise Exception() # rolls back + + :type client: :class:`google.cloud.datastore.client.Client` + :param client: The client used to connect to datastore. + """ + + _id = None # "protected" attribute, always None for non-transactions + + _INITIAL = 0 + """Enum value for _INITIAL status of batch/transaction.""" + + _IN_PROGRESS = 1 + """Enum value for _IN_PROGRESS status of batch/transaction.""" + + _ABORTED = 2 + """Enum value for _ABORTED status of batch/transaction.""" + + _FINISHED = 3 + """Enum value for _FINISHED status of batch/transaction.""" + + def __init__(self, client): + self._client = client + self._commit_request = _datastore_pb2.CommitRequest() + self._partial_key_entities = [] + self._status = self._INITIAL + + def current(self): + """Return the topmost batch / transaction, or None.""" + return self._client.current_batch + + @property + def project(self): + """Getter for project in which the batch will run. + + :rtype: :class:`str` + :returns: The project in which the batch will run. + """ + return self._client.project + + @property + def namespace(self): + """Getter for namespace in which the batch will run. + + :rtype: :class:`str` + :returns: The namespace in which the batch will run. + """ + return self._client.namespace + + @property + def connection(self): + """Getter for connection over which the batch will run. + + :rtype: :class:`google.cloud.datastore.connection.Connection` + :returns: The connection over which the batch will run. + """ + return self._client.connection + + def _add_partial_key_entity_pb(self): + """Adds a new mutation for an entity with a partial key. + + :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :returns: The newly created entity protobuf that will be + updated and sent with a commit. + """ + new_mutation = self.mutations.add() + return new_mutation.insert + + def _add_complete_key_entity_pb(self): + """Adds a new mutation for an entity with a completed key. + + :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :returns: The newly created entity protobuf that will be + updated and sent with a commit. + """ + # We use ``upsert`` for entities with completed keys, rather than + # ``insert`` or ``update``, in order not to create race conditions + # based on prior existence / removal of the entity. + new_mutation = self.mutations.add() + return new_mutation.upsert + + def _add_delete_key_pb(self): + """Adds a new mutation for a key to be deleted. + + :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :returns: The newly created key protobuf that will be + deleted when sent with a commit. 
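
The three ``_add_*`` helpers here map ``put()`` and ``delete()`` onto the protobuf mutation kinds: ``insert`` for partial keys, ``upsert`` for complete keys (avoiding the race the comment above describes), and ``delete`` for removals. A minimal sketch of the same shape, built directly against the generated module laid out earlier in this patch; the ``Task`` kind and ``my-project`` ID are placeholders:

    from google.cloud.datastore._generated import datastore_pb2

    request = datastore_pb2.CommitRequest()

    mutation = request.mutations.add()
    mutation.insert.key.partition_id.project_id = 'my-project'
    mutation.insert.key.path.add(kind='Task')            # partial key: no id/name

    mutation = request.mutations.add()
    mutation.upsert.key.partition_id.project_id = 'my-project'
    mutation.upsert.key.path.add(kind='Task', id=1234)   # complete key

    mutation = request.mutations.add()
    mutation.delete.partition_id.project_id = 'my-project'
    mutation.delete.path.add(kind='Task', id=5678)       # key to remove

    assert len(request.mutations) == 3   # all three travel in one commit
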
+ """ + new_mutation = self.mutations.add() + return new_mutation.delete + + @property + def mutations(self): + """Getter for the changes accumulated by this batch. + + Every batch is committed with a single commit request containing all + the work to be done as mutations. Inside a batch, calling :meth:`put` + with an entity, or :meth:`delete` with a key, builds up the request by + adding a new mutation. This getter returns the protobuf that has been + built-up so far. + + :rtype: iterable + :returns: The list of :class:`._generated.datastore_pb2.Mutation` + protobufs to be sent in the commit request. + """ + return self._commit_request.mutations + + def put(self, entity): + """Remember an entity's state to be saved during :meth:`commit`. + + .. note:: + Any existing properties for the entity will be replaced by those + currently set on this instance. Already-stored properties which do + not correspond to keys set on this instance will be removed from + the datastore. + + .. note:: + Property values which are "text" ('unicode' in Python2, 'str' in + Python3) map to 'string_value' in the datastore; values which are + "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. + + When an entity has a partial key, calling :meth:`commit` sends it as + an ``insert`` mutation and the key is completed. On return, + the key for the ``entity`` passed in is updated to match the key ID + assigned by the server. + + :type entity: :class:`google.cloud.datastore.entity.Entity` + :param entity: the entity to be saved. + + :raises: :class:`~exceptions.ValueError` if the batch is not in + progress, if entity has no key assigned, or if the key's + ``project`` does not match ours. + """ + if self._status != self._IN_PROGRESS: + raise ValueError('Batch must be in progress to put()') + + if entity.key is None: + raise ValueError("Entity must have a key") + + if self.project != entity.key.project: + raise ValueError("Key must be from same project as batch") + + if entity.key.is_partial: + entity_pb = self._add_partial_key_entity_pb() + self._partial_key_entities.append(entity) + else: + entity_pb = self._add_complete_key_entity_pb() + + _assign_entity_to_pb(entity_pb, entity) + + def delete(self, key): + """Remember a key to be deleted during :meth:`commit`. + + :type key: :class:`google.cloud.datastore.key.Key` + :param key: the key to be deleted. + + :raises: :class:`~exceptions.ValueError` if the batch is not in + progress, if key is not complete, or if the key's + ``project`` does not match ours. + """ + if self._status != self._IN_PROGRESS: + raise ValueError('Batch must be in progress to delete()') + + if key.is_partial: + raise ValueError("Key must be complete") + + if self.project != key.project: + raise ValueError("Key must be from same project as batch") + + key_pb = key.to_protobuf() + self._add_delete_key_pb().CopyFrom(key_pb) + + def begin(self): + """Begins a batch. + + This method is called automatically when entering a with + statement, however it can be called explicitly if you don't want + to use a context manager. + + Overridden by :class:`google.cloud.datastore.transaction.Transaction`. + + :raises: :class:`ValueError` if the batch has already begun. + """ + if self._status != self._INITIAL: + raise ValueError('Batch already started previously.') + self._status = self._IN_PROGRESS + + def _commit(self): + """Commits the batch. + + This is called by :meth:`commit`. + """ + # NOTE: ``self._commit_request`` will be modified. 
+ _, updated_keys = self.connection.commit( + self.project, self._commit_request, self._id) + # If the back-end returns without error, we are guaranteed that + # :meth:`Connection.commit` will return keys that match (length and + # order) directly ``_partial_key_entities``. + for new_key_pb, entity in zip(updated_keys, + self._partial_key_entities): + new_id = new_key_pb.path[-1].id + entity.key = entity.key.completed_key(new_id) + + def commit(self): + """Commits the batch. + + This is called automatically upon exiting a with statement, + however it can be called explicitly if you don't want to use a + context manager. + + :raises: :class:`~exceptions.ValueError` if the batch is not + in progress. + """ + if self._status != self._IN_PROGRESS: + raise ValueError('Batch must be in progress to commit()') + + try: + self._commit() + finally: + self._status = self._FINISHED + + def rollback(self): + """Rolls back the current batch. + + Marks the batch as aborted (can't be used again). + + Overridden by :class:`google.cloud.datastore.transaction.Transaction`. + + :raises: :class:`~exceptions.ValueError` if the batch is not + in progress. + """ + if self._status != self._IN_PROGRESS: + raise ValueError('Batch must be in progress to rollback()') + + self._status = self._ABORTED + + def __enter__(self): + self.begin() + # NOTE: We make sure begin() succeeds before pushing onto the stack. + self._client._push_batch(self) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + try: + if exc_type is None: + self.commit() + else: + self.rollback() + finally: + self._client._pop_batch() + + +def _assign_entity_to_pb(entity_pb, entity): + """Copy ``entity`` into ``entity_pb``. + + Helper method for ``Batch.put``. + + :type entity_pb: :class:`._generated.entity_pb2.Entity` + :param entity_pb: The entity owned by a mutation. + + :type entity: :class:`google.cloud.datastore.entity.Entity` + :param entity: The entity being updated within the batch / transaction. + """ + bare_entity_pb = helpers.entity_to_protobuf(entity) + bare_entity_pb.key.CopyFrom(bare_entity_pb.key) + entity_pb.CopyFrom(bare_entity_pb) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py new file mode 100644 index 000000000000..5df32cc42970 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -0,0 +1,488 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
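
The ``zip`` in ``_commit()`` above is what makes partial keys usable: on a clean exit from the ``with`` block the batch commits, and each entity that was ``put()`` with a partial key gets the server-assigned ID swapped in. A short sketch, with ``Task`` as a placeholder kind:

    from google.cloud import datastore

    client = datastore.Client()
    entity = datastore.Entity(key=client.key('Task'))   # partial key
    with client.batch() as batch:
        batch.put(entity)
    assert not entity.key.is_partial    # id assigned by the backend
    print(entity.key.id)
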
+"""Convenience wrapper for invoking APIs/factories w/ a project.""" + +import os + +from google.cloud._helpers import _LocalStack +from google.cloud._helpers import ( + _determine_default_project as _base_default_project) +from google.cloud.client import _ClientProjectMixin +from google.cloud.client import Client as _BaseClient +from google.cloud.datastore import helpers +from google.cloud.datastore.connection import Connection +from google.cloud.datastore.batch import Batch +from google.cloud.datastore.entity import Entity +from google.cloud.datastore.key import Key +from google.cloud.datastore.query import Query +from google.cloud.datastore.transaction import Transaction +from google.cloud.environment_vars import GCD_DATASET + + +_MAX_LOOPS = 128 +"""Maximum number of iterations to wait for deferred keys.""" + + +def _get_gcd_project(): + """Gets the GCD application ID if it can be inferred.""" + return os.getenv(GCD_DATASET) + + +def _determine_default_project(project=None): + """Determine default project explicitly or implicitly as fall-back. + + In implicit case, supports four environments. In order of precedence, the + implicit environments are: + + * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) + * GOOGLE_CLOUD_PROJECT environment variable + * Google App Engine application ID + * Google Compute Engine project ID (from metadata server) + + :type project: string + :param project: Optional. The project to use as default. + + :rtype: string or ``NoneType`` + :returns: Default project if it can be determined. + """ + if project is None: + project = _get_gcd_project() + + if project is None: + project = _base_default_project(project=project) + + return project + + +def _extended_lookup(connection, project, key_pbs, + missing=None, deferred=None, + eventual=False, transaction_id=None): + """Repeat lookup until all keys found (unless stop requested). + + Helper function for :meth:`Client.get_multi`. + + :type connection: :class:`google.cloud.datastore.connection.Connection` + :param connection: The connection used to connect to datastore. + + :type project: string + :param project: The project to make the request for. + + :type key_pbs: list of :class:`._generated.entity_pb2.Key` + :param key_pbs: The keys to retrieve from the datastore. + + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entity + protobufs returned by the backend as "missing" will be + copied into it. + + :type deferred: list + :param deferred: (Optional) If a list is passed, the key protobufs returned + by the backend as "deferred" will be copied into it. + + :type eventual: bool + :param eventual: If False (the default), request ``STRONG`` read + consistency. If True, request ``EVENTUAL`` read + consistency. + + :type transaction_id: string + :param transaction_id: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True``. + + :rtype: list of :class:`._generated.entity_pb2.Entity` + :returns: The requested entities. + :raises: :class:`ValueError` if missing / deferred are not null or + empty list. + """ + if missing is not None and missing != []: + raise ValueError('missing must be None or an empty list') + + if deferred is not None and deferred != []: + raise ValueError('deferred must be None or an empty list') + + results = [] + + loop_num = 0 + while loop_num < _MAX_LOOPS: # loop against possible deferred. 
+ loop_num += 1 + + results_found, missing_found, deferred_found = connection.lookup( + project=project, + key_pbs=key_pbs, + eventual=eventual, + transaction_id=transaction_id, + ) + + results.extend(results_found) + + if missing is not None: + missing.extend(missing_found) + + if deferred is not None: + deferred.extend(deferred_found) + break + + if len(deferred_found) == 0: + break + + # We have deferred keys, and the user didn't ask to know about + # them, so retry (but only with the deferred ones). + key_pbs = deferred_found + + return results + + +class Client(_BaseClient, _ClientProjectMixin): + """Convenience wrapper for invoking APIs/factories w/ a project. + + :type project: string + :param project: (optional) The project to pass to proxied API methods. + + :type namespace: string + :param namespace: (optional) namespace to pass to proxied API methods. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + _connection_class = Connection + + def __init__(self, project=None, namespace=None, + credentials=None, http=None): + _ClientProjectMixin.__init__(self, project=project) + self.namespace = namespace + self._batch_stack = _LocalStack() + super(Client, self).__init__(credentials, http) + + @staticmethod + def _determine_default(project): + """Helper: override default project detection.""" + return _determine_default_project(project) + + def _push_batch(self, batch): + """Push a batch/transaction onto our stack. + + "Protected", intended for use by batch / transaction context mgrs. + + :type batch: :class:`google.cloud.datastore.batch.Batch`, or an object + implementing its API. + :param batch: newly-active batch/transaction. + """ + self._batch_stack.push(batch) + + def _pop_batch(self): + """Pop a batch/transaction from our stack. + + "Protected", intended for use by batch / transaction context mgrs. + + :raises: IndexError if the stack is empty. + :rtype: :class:`google.cloud.datastore.batch.Batch`, or an object + implementing its API. + :returns: the top-most batch/transaction, after removing it. + """ + return self._batch_stack.pop() + + @property + def current_batch(self): + """Currently-active batch. + + :rtype: :class:`google.cloud.datastore.batch.Batch`, or an object + implementing its API, or ``NoneType`` (if no batch is active). + :returns: The batch/transaction at the top of the batch stack. + """ + return self._batch_stack.top + + @property + def current_transaction(self): + """Currently-active transaction. + + :rtype: :class:`google.cloud.datastore.transaction.Transaction`, or an + object implementing its API, or ``NoneType`` (if no transaction + is active). + :returns: The transaction at the top of the batch stack. + """ + transaction = self.current_batch + if isinstance(transaction, Transaction): + return transaction + + def get(self, key, missing=None, deferred=None, transaction=None): + """Retrieve an entity from a single key (if it exists). + + .. note:: + + This is just a thin wrapper over :meth:`get_multi`. 
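
The loop in ``_extended_lookup()`` gives callers two behaviors: pass a ``deferred`` list to see the keys the backend punted on, or pass nothing and let the helper retry those keys itself (up to ``_MAX_LOOPS`` rounds). A sketch, assuming a ``client`` and a ``keys`` list exist:

    deferred = []
    entities = client.get_multi(keys, deferred=deferred)
    if deferred:
        # Caller opted in, so no retry happened; finish the job manually.
        entities += client.get_multi(deferred)

    entities = client.get_multi(keys)   # no list passed: retried internally
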
+ The backend API does not make a distinction between a single key or + multiple keys in a lookup request. + + :type key: :class:`google.cloud.datastore.key.Key` + :param key: The key to be retrieved from the datastore. + + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entities + returned by the backend as "missing" will be copied + into it. + + :type deferred: list + :param deferred: (Optional) If a list is passed, the keys returned + by the backend as "deferred" will be copied into it. + + :type transaction: :class:`~.transaction.Transaction` + :param transaction: (Optional) Transaction to use for read consistency. + If not passed, uses current transaction, if set. + + :rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType`` + :returns: The requested entity if it exists. + """ + entities = self.get_multi(keys=[key], missing=missing, + deferred=deferred, transaction=transaction) + if entities: + return entities[0] + + def get_multi(self, keys, missing=None, deferred=None, transaction=None): + """Retrieve entities, along with their attributes. + + :type keys: list of :class:`google.cloud.datastore.key.Key` + :param keys: The keys to be retrieved from the datastore. + + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entities + returned by the backend as "missing" will be copied + into it. If the list is not empty, an error will occur. + + :type deferred: list + :param deferred: (Optional) If a list is passed, the keys returned + by the backend as "deferred" will be copied into it. + If the list is not empty, an error will occur. + + :type transaction: :class:`~.transaction.Transaction` + :param transaction: (Optional) Transaction to use for read consistency. + If not passed, uses current transaction, if set. + + :rtype: list of :class:`google.cloud.datastore.entity.Entity` + :returns: The requested entities. + :raises: :class:`ValueError` if one or more of ``keys`` has a project + which does not match our project. + """ + if not keys: + return [] + + ids = set(key.project for key in keys) + for current_id in ids: + if current_id != self.project: + raise ValueError('Keys do not match project') + + if transaction is None: + transaction = self.current_transaction + + entity_pbs = _extended_lookup( + connection=self.connection, + project=self.project, + key_pbs=[k.to_protobuf() for k in keys], + missing=missing, + deferred=deferred, + transaction_id=transaction and transaction.id, + ) + + if missing is not None: + missing[:] = [ + helpers.entity_from_protobuf(missed_pb) + for missed_pb in missing] + + if deferred is not None: + deferred[:] = [ + helpers.key_from_protobuf(deferred_pb) + for deferred_pb in deferred] + + return [helpers.entity_from_protobuf(entity_pb) + for entity_pb in entity_pbs] + + def put(self, entity): + """Save an entity in the Cloud Datastore. + + .. note:: + + This is just a thin wrapper over :meth:`put_multi`. + The backend API does not make a distinction between a single + entity or multiple entities in a commit request. + + :type entity: :class:`google.cloud.datastore.entity.Entity` + :param entity: The entity to be saved to the datastore. + """ + self.put_multi(entities=[entity]) + + def put_multi(self, entities): + """Save entities in the Cloud Datastore. + + :type entities: list of :class:`google.cloud.datastore.entity.Entity` + :param entities: The entities to be saved to the datastore. + + :raises: :class:`ValueError` if ``entities`` is a single entity. 
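
The ``missing`` plumbing in ``get()`` / ``get_multi()`` follows the same opt-in pattern: ``get()`` simply returns ``None`` for an absent key, while ``get_multi()`` can report the absentees explicitly. A sketch, with a hypothetical ID and ``key1`` / ``key2`` assumed in scope:

    entity = client.get(client.key('Task', 99999))
    if entity is None:
        print('no such entity')

    missing = []
    found = client.get_multi([key1, key2], missing=missing)
    # ``missing`` now holds key-only Entity placeholders for absent keys.
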
+ """ + if isinstance(entities, Entity): + raise ValueError("Pass a sequence of entities") + + if not entities: + return + + current = self.current_batch + in_batch = current is not None + + if not in_batch: + current = self.batch() + current.begin() + + for entity in entities: + current.put(entity) + + if not in_batch: + current.commit() + + def delete(self, key): + """Delete the key in the Cloud Datastore. + + .. note:: + + This is just a thin wrapper over :meth:`delete_multi`. + The backend API does not make a distinction between a single key or + multiple keys in a commit request. + + :type key: :class:`google.cloud.datastore.key.Key` + :param key: The key to be deleted from the datastore. + """ + self.delete_multi(keys=[key]) + + def delete_multi(self, keys): + """Delete keys from the Cloud Datastore. + + :type keys: list of :class:`google.cloud.datastore.key.Key` + :param keys: The keys to be deleted from the Datastore. + """ + if not keys: + return + + # We allow partial keys to attempt a delete, the backend will fail. + current = self.current_batch + in_batch = current is not None + + if not in_batch: + current = self.batch() + current.begin() + + for key in keys: + current.delete(key) + + if not in_batch: + current.commit() + + def allocate_ids(self, incomplete_key, num_ids): + """Allocate a list of IDs from a partial key. + + :type incomplete_key: :class:`google.cloud.datastore.key.Key` + :param incomplete_key: Partial key to use as base for allocated IDs. + + :type num_ids: int + :param num_ids: The number of IDs to allocate. + + :rtype: list of :class:`google.cloud.datastore.key.Key` + :returns: The (complete) keys allocated with ``incomplete_key`` as + root. + :raises: :class:`ValueError` if ``incomplete_key`` is not a + partial key. + """ + if not incomplete_key.is_partial: + raise ValueError(('Key is not partial.', incomplete_key)) + + incomplete_key_pb = incomplete_key.to_protobuf() + incomplete_key_pbs = [incomplete_key_pb] * num_ids + + conn = self.connection + allocated_key_pbs = conn.allocate_ids(incomplete_key.project, + incomplete_key_pbs) + allocated_ids = [allocated_key_pb.path[-1].id + for allocated_key_pb in allocated_key_pbs] + return [incomplete_key.completed_key(allocated_id) + for allocated_id in allocated_ids] + + def key(self, *path_args, **kwargs): + """Proxy to :class:`google.cloud.datastore.key.Key`. + + Passes our ``project``. + """ + if 'project' in kwargs: + raise TypeError('Cannot pass project') + kwargs['project'] = self.project + if 'namespace' not in kwargs: + kwargs['namespace'] = self.namespace + return Key(*path_args, **kwargs) + + def batch(self): + """Proxy to :class:`google.cloud.datastore.batch.Batch`.""" + return Batch(self) + + def transaction(self): + """Proxy to :class:`google.cloud.datastore.transaction.Transaction`.""" + return Transaction(self) + + def query(self, **kwargs): + """Proxy to :class:`google.cloud.datastore.query.Query`. + + Passes our ``project``. 
+ + Using query to search a datastore:: + + >>> from google.cloud import datastore + >>> client = datastore.Client() + >>> query = client.query(kind='MyKind') + >>> query.add_filter('property', '=', 'val') + + Using the query iterator's + :meth:`~google.cloud.datastore.query.Iterator.next_page` method: + + >>> query_iter = query.fetch() + >>> entities, more_results, cursor = query_iter.next_page() + >>> entities + [] + >>> more_results + + >>> cursor + + + Under the hood this is doing: + + >>> connection.run_query('project', query.to_protobuf()) + [], cursor, more_results, skipped_results + + :type kwargs: dict + :param kwargs: Parameters for initializing and instance of + :class:`google.cloud.datastore.query.Query`. + + :rtype: :class:`google.cloud.datastore.query.Query` + :returns: An instance of :class:`google.cloud.datastore.query.Query` + """ + if 'client' in kwargs: + raise TypeError('Cannot pass client') + if 'project' in kwargs: + raise TypeError('Cannot pass project') + kwargs['project'] = self.project + if 'namespace' not in kwargs: + kwargs['namespace'] = self.namespace + return Query(self, **kwargs) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py new file mode 100644 index 000000000000..ef0eeb066d1c --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -0,0 +1,672 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Connections to Google Cloud Datastore API servers.""" + +import os + +from google.rpc import status_pb2 + +from google.cloud._helpers import make_insecure_stub +from google.cloud._helpers import make_secure_stub +from google.cloud import connection as connection_module +from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.environment_vars import GCD_HOST +from google.cloud.exceptions import Conflict +from google.cloud.exceptions import GrpcRendezvous +from google.cloud.exceptions import make_exception +from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 +# pylint: disable=ungrouped-imports +try: + from grpc import StatusCode + from google.cloud.datastore._generated import datastore_grpc_pb2 +except ImportError: # pragma: NO COVER + _HAVE_GRPC = False + datastore_grpc_pb2 = None + StatusCode = None +else: + _HAVE_GRPC = True +# pylint: enable=ungrouped-imports + + +DATASTORE_API_HOST = 'datastore.googleapis.com' +"""Datastore API request host.""" + +_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC + + +class _DatastoreAPIOverHttp(object): + """Helper mapping datastore API methods. + + Makes requests to send / receive protobuf content over HTTP/1.1. + + Methods make bare API requests without any helpers for constructing + the requests or parsing the responses. 
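
The ``_USE_GRPC`` computation above has two subtleties worth spelling out: it is evaluated once at import time, and ``os.getenv(DISABLE_GRPC, False)`` is used only for truthiness, so any non-empty value (even ``'false'``) disables gRPC. A standalone sketch of the same logic; the literal environment-variable name is an assumption here, since the module imports it from ``google.cloud.environment_vars``:

    import os

    try:
        import grpc  # noqa: F401
        have_grpc = True
    except ImportError:
        have_grpc = False

    # Must be set before the module is imported to have any effect.
    disable = os.getenv('GOOGLE_CLOUD_DISABLE_GRPC', False)  # assumed name
    use_grpc = have_grpc and not disable
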
+ + :type connection: :class:`google.cloud.datastore.connection.Connection` + :param connection: A connection object that contains helpful + information for making requests. + """ + + def __init__(self, connection): + self.connection = connection + + def _request(self, project, method, data): + """Make a request over the Http transport to the Cloud Datastore API. + + :type project: string + :param project: The project to make the request for. + + :type method: string + :param method: The API call method name (ie, ``runQuery``, + ``lookup``, etc) + + :type data: string + :param data: The data to send with the API call. + Typically this is a serialized Protobuf string. + + :rtype: string + :returns: The string response content from the API call. + :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the + response code is not 200 OK. + """ + headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': str(len(data)), + 'User-Agent': self.connection.USER_AGENT, + } + headers, content = self.connection.http.request( + uri=self.connection.build_api_url(project=project, method=method), + method='POST', headers=headers, body=data) + + status = headers['status'] + if status != '200': + error_status = status_pb2.Status.FromString(content) + raise make_exception(headers, error_status.message, use_json=False) + + return content + + def _rpc(self, project, method, request_pb, response_pb_cls): + """Make a protobuf RPC request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type method: string + :param method: The name of the method to invoke. + + :type request_pb: :class:`google.protobuf.message.Message` instance + :param request_pb: the protobuf instance representing the request. + + :type response_pb_cls: A :class:`google.protobuf.message.Message` + subclass. + :param response_pb_cls: The class used to unmarshall the response + protobuf. + + :rtype: :class:`google.protobuf.message.Message` + :returns: The RPC message parsed from the response. + """ + response = self._request(project=project, method=method, + data=request_pb.SerializeToString()) + return response_pb_cls.FromString(response) + + def lookup(self, project, request_pb): + """Perform a ``lookup`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.LookupRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.LookupResponse` + :returns: The returned protobuf response object. + """ + return self._rpc(project, 'lookup', request_pb, + _datastore_pb2.LookupResponse) + + def run_query(self, project, request_pb): + """Perform a ``runQuery`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.RunQueryRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.RunQueryResponse` + :returns: The returned protobuf response object. + """ + return self._rpc(project, 'runQuery', request_pb, + _datastore_pb2.RunQueryResponse) + + def begin_transaction(self, project, request_pb): + """Perform a ``beginTransaction`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. 
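
Every public method in this class reduces to the same serialize / POST / parse round trip through ``_rpc()``. Condensed, for ``lookup`` (the empty byte string below stands in for a real response body):

    request_pb = _datastore_pb2.LookupRequest()
    data = request_pb.SerializeToString()   # bytes sent as the POST body
    # _request() POSTs ``data`` with Content-Type application/x-protobuf
    # and hands back the raw response bytes iff the status is 200.
    content = b''
    response_pb = _datastore_pb2.LookupResponse.FromString(content)
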
+ + :type request_pb: + :class:`._generated.datastore_pb2.BeginTransactionRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` + :returns: The returned protobuf response object. + """ + return self._rpc(project, 'beginTransaction', request_pb, + _datastore_pb2.BeginTransactionResponse) + + def commit(self, project, request_pb): + """Perform a ``commit`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.CommitRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.CommitResponse` + :returns: The returned protobuf response object. + """ + return self._rpc(project, 'commit', request_pb, + _datastore_pb2.CommitResponse) + + def rollback(self, project, request_pb): + """Perform a ``rollback`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.RollbackRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.RollbackResponse` + :returns: The returned protobuf response object. + """ + return self._rpc(project, 'rollback', request_pb, + _datastore_pb2.RollbackResponse) + + def allocate_ids(self, project, request_pb): + """Perform an ``allocateIds`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.AllocateIdsRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.AllocateIdsResponse` + :returns: The returned protobuf response object. + """ + return self._rpc(project, 'allocateIds', request_pb, + _datastore_pb2.AllocateIdsResponse) + + +class _DatastoreAPIOverGRPC(object): + """Helper mapping datastore API methods. + + Makes requests to send / receive protobuf content over gRPC. + + Methods make bare API requests without any helpers for constructing + the requests or parsing the responses. + + :type connection: :class:`google.cloud.datastore.connection.Connection` + :param connection: A connection object that contains helpful + information for making requests. + + :type secure: bool + :param secure: Flag indicating if a secure stub connection is needed. + """ + + def __init__(self, connection, secure): + if secure: + self._stub = make_secure_stub(connection.credentials, + connection.USER_AGENT, + datastore_grpc_pb2.DatastoreStub, + connection.host) + else: + self._stub = make_insecure_stub(datastore_grpc_pb2.DatastoreStub, + connection.host) + + def lookup(self, project, request_pb): + """Perform a ``lookup`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.LookupRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.LookupResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + return self._stub.Lookup(request_pb) + + def run_query(self, project, request_pb): + """Perform a ``runQuery`` request. + + :type project: string + :param project: The project to connect to. 
This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.RunQueryRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.RunQueryResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + return self._stub.RunQuery(request_pb) + + def begin_transaction(self, project, request_pb): + """Perform a ``beginTransaction`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: + :class:`._generated.datastore_pb2.BeginTransactionRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + return self._stub.BeginTransaction(request_pb) + + def commit(self, project, request_pb): + """Perform a ``commit`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.CommitRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.CommitResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + try: + return self._stub.Commit(request_pb) + except GrpcRendezvous as exc: + if exc.code() == StatusCode.ABORTED: + raise Conflict(exc.details()) + raise + + def rollback(self, project, request_pb): + """Perform a ``rollback`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.RollbackRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.RollbackResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + return self._stub.Rollback(request_pb) + + def allocate_ids(self, project, request_pb): + """Perform an ``allocateIds`` request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`._generated.datastore_pb2.AllocateIdsRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`._generated.datastore_pb2.AllocateIdsResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + return self._stub.AllocateIds(request_pb) + + +class Connection(connection_module.Connection): + """A connection to the Google Cloud Datastore via the Protobuf API. + + This class should understand only the basic types (and protobufs) + in method arguments, however it should be capable of returning advanced + types. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: The OAuth2 Credentials to use for this connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. 
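+
+    A minimal usage sketch (the ``connection`` name is illustrative, and
+    credentials handling is simplified; the URL shown just follows
+    ``API_URL_TEMPLATE`` below):
+
+    >>> connection = Connection()
+    >>> connection.build_api_url('my-project', 'lookup')
+    'https://datastore.googleapis.com/v1/projects/my-project:lookup'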
+ """ + + API_BASE_URL = 'https://' + DATASTORE_API_HOST + """The base of the API call URL.""" + + API_VERSION = 'v1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = ('{api_base}/{api_version}/projects' + '/{project}:{method}') + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/datastore',) + """The scopes required for authenticating as a Cloud Datastore consumer.""" + + def __init__(self, credentials=None, http=None): + super(Connection, self).__init__(credentials=credentials, http=http) + try: + self.host = os.environ[GCD_HOST] + self.api_base_url = 'http://' + self.host + secure = False + except KeyError: + self.host = DATASTORE_API_HOST + self.api_base_url = self.__class__.API_BASE_URL + secure = True + if _USE_GRPC: + self._datastore_api = _DatastoreAPIOverGRPC(self, secure=secure) + else: + self._datastore_api = _DatastoreAPIOverHttp(self) + + def build_api_url(self, project, method, base_url=None, + api_version=None): + """Construct the URL for a particular API call. + + This method is used internally to come up with the URL to use when + making RPCs to the Cloud Datastore API. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type method: string + :param method: The API method to call (e.g. 'runQuery', 'lookup'). + + :type base_url: string + :param base_url: The base URL where the API lives. + You shouldn't have to provide this. + + :type api_version: string + :param api_version: The version of the API to connect to. + You shouldn't have to provide this. + + :rtype: str + :returns: The API URL created. + """ + return self.API_URL_TEMPLATE.format( + api_base=(base_url or self.api_base_url), + api_version=(api_version or self.API_VERSION), + project=project, method=method) + + def lookup(self, project, key_pbs, + eventual=False, transaction_id=None): + """Lookup keys from a project in the Cloud Datastore. + + Maps the ``DatastoreService.Lookup`` protobuf RPC. + + This uses mostly protobufs + (:class:`google.cloud.datastore._generated.entity_pb2.Key` as input + and :class:`google.cloud.datastore._generated.entity_pb2.Entity` + as output). It is used under the hood in + :meth:`Client.get() <.datastore.client.Client.get>`: + + >>> from google.cloud import datastore + >>> client = datastore.Client(project='project') + >>> key = client.key('MyKind', 1234) + >>> client.get(key) + [] + + Using a :class:`Connection` directly: + + >>> connection.lookup('project', [key.to_protobuf()]) + [] + + :type project: string + :param project: The project to look up the keys in. + + :type key_pbs: list of + :class:`google.cloud.datastore._generated.entity_pb2.Key` + :param key_pbs: The keys to retrieve from the datastore. + + :type eventual: bool + :param eventual: If False (the default), request ``STRONG`` read + consistency. If True, request ``EVENTUAL`` read + consistency. + + :type transaction_id: string + :param transaction_id: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True``. + + :rtype: tuple + :returns: A triple of (``results``, ``missing``, ``deferred``) where + both ``results`` and ``missing`` are lists of + :class:`google.cloud.datastore._generated.entity_pb2.Entity` + and ``deferred`` is a list of + :class:`google.cloud.datastore._generated.entity_pb2.Key`. 
+ """ + lookup_request = _datastore_pb2.LookupRequest() + _set_read_options(lookup_request, eventual, transaction_id) + _add_keys_to_request(lookup_request.keys, key_pbs) + + lookup_response = self._datastore_api.lookup(project, lookup_request) + + results = [result.entity for result in lookup_response.found] + missing = [result.entity for result in lookup_response.missing] + + return results, missing, list(lookup_response.deferred) + + def run_query(self, project, query_pb, namespace=None, + eventual=False, transaction_id=None): + """Run a query on the Cloud Datastore. + + Maps the ``DatastoreService.RunQuery`` protobuf RPC. + + Given a Query protobuf, sends a ``runQuery`` request to the + Cloud Datastore API and returns a list of entity protobufs + matching the query. + + You typically wouldn't use this method directly, in favor of the + :meth:`google.cloud.datastore.query.Query.fetch` method. + + Under the hood, the :class:`google.cloud.datastore.query.Query` class + uses this method to fetch data. + + :type project: string + :param project: The project over which to run the query. + + :type query_pb: :class:`.datastore._generated.query_pb2.Query` + :param query_pb: The Protobuf representing the query to run. + + :type namespace: string + :param namespace: The namespace over which to run the query. + + :type eventual: bool + :param eventual: If False (the default), request ``STRONG`` read + consistency. If True, request ``EVENTUAL`` read + consistency. + + :type transaction_id: string + :param transaction_id: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True``. + + :rtype: tuple + :returns: Four-tuple containing the entities returned, + the end cursor of the query, a ``more_results`` + enum and a count of the number of skipped results. + """ + request = _datastore_pb2.RunQueryRequest() + _set_read_options(request, eventual, transaction_id) + + if namespace: + request.partition_id.namespace_id = namespace + + request.query.CopyFrom(query_pb) + response = self._datastore_api.run_query(project, request) + return ( + [e.entity for e in response.batch.entity_results], + response.batch.end_cursor, # Assume response always has cursor. + response.batch.more_results, + response.batch.skipped_results, + ) + + def begin_transaction(self, project): + """Begin a transaction. + + Maps the ``DatastoreService.BeginTransaction`` protobuf RPC. + + :type project: string + :param project: The project to which the transaction applies. + + :rtype: bytes + :returns: The serialized transaction that was begun. + """ + request = _datastore_pb2.BeginTransactionRequest() + response = self._datastore_api.begin_transaction(project, request) + return response.transaction + + def commit(self, project, request, transaction_id): + """Commit mutations in context of current transaction (if any). + + Maps the ``DatastoreService.Commit`` protobuf RPC. + + :type project: string + :param project: The project to which the transaction applies. + + :type request: :class:`._generated.datastore_pb2.CommitRequest` + :param request: The protobuf with the mutations being committed. + + :type transaction_id: string or None + :param transaction_id: The transaction ID returned from + :meth:`begin_transaction`. Non-transactional + batches must pass ``None``. + + .. note:: + + This method will mutate ``request`` before using it. 
+ + :rtype: tuple + :returns: The pair of the number of index updates and a list of + :class:`._generated.entity_pb2.Key` for each incomplete key + that was completed in the commit. + """ + if transaction_id: + request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL + request.transaction = transaction_id + else: + request.mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL + + response = self._datastore_api.commit(project, request) + return _parse_commit_response(response) + + def rollback(self, project, transaction_id): + """Rollback the connection's existing transaction. + + Maps the ``DatastoreService.Rollback`` protobuf RPC. + + :type project: string + :param project: The project to which the transaction belongs. + + :type transaction_id: string + :param transaction_id: The transaction ID returned from + :meth:`begin_transaction`. + """ + request = _datastore_pb2.RollbackRequest() + request.transaction = transaction_id + # Nothing to do with this response, so just execute the method. + self._datastore_api.rollback(project, request) + + def allocate_ids(self, project, key_pbs): + """Obtain backend-generated IDs for a set of keys. + + Maps the ``DatastoreService.AllocateIds`` protobuf RPC. + + :type project: string + :param project: The project to which the transaction belongs. + + :type key_pbs: list of + :class:`google.cloud.datastore._generated.entity_pb2.Key` + :param key_pbs: The keys for which the backend should allocate IDs. + + :rtype: list of :class:`.datastore._generated.entity_pb2.Key` + :returns: An equal number of keys, with IDs filled in by the backend. + """ + request = _datastore_pb2.AllocateIdsRequest() + _add_keys_to_request(request.keys, key_pbs) + # Nothing to do with this response, so just execute the method. + response = self._datastore_api.allocate_ids(project, request) + return list(response.keys) + + +def _set_read_options(request, eventual, transaction_id): + """Validate rules for read options, and assign to the request. + + Helper method for ``lookup()`` and ``run_query``. + + :raises: :class:`ValueError` if ``eventual`` is ``True`` and the + ``transaction_id`` is not ``None``. + """ + if eventual and (transaction_id is not None): + raise ValueError('eventual must be False when in a transaction') + + opts = request.read_options + if eventual: + opts.read_consistency = _datastore_pb2.ReadOptions.EVENTUAL + elif transaction_id: + opts.transaction = transaction_id + + +def _add_keys_to_request(request_field_pb, key_pbs): + """Add protobuf keys to a request object. + + :type request_field_pb: `RepeatedCompositeFieldContainer` + :param request_field_pb: A repeated proto field that contains keys. + + :type key_pbs: list of :class:`.datastore._generated.entity_pb2.Key` + :param key_pbs: The keys to add to a request. + """ + for key_pb in key_pbs: + request_field_pb.add().CopyFrom(key_pb) + + +def _parse_commit_response(commit_response_pb): + """Extract response data from a commit response. + + :type commit_response_pb: :class:`._generated.datastore_pb2.CommitResponse` + :param commit_response_pb: The protobuf response from a commit request. + + :rtype: tuple + :returns: The pair of the number of index updates and a list of + :class:`._generated.entity_pb2.Key` for each incomplete key + that was completed in the commit. 
+ """ + mut_results = commit_response_pb.mutation_results + index_updates = commit_response_pb.index_updates + completed_keys = [mut_result.key for mut_result in mut_results + if mut_result.HasField('key')] # Message field (Key) + return index_updates, completed_keys diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py new file mode 100644 index 000000000000..6da069ccf432 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -0,0 +1,143 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Class for representing a single entity in the Cloud Datastore.""" + + +from google.cloud._helpers import _ensure_tuple_or_list + + +class Entity(dict): + """Entities are akin to rows in a relational database + + An entity storing the actual instance of data. + + Each entity is officially represented with a + :class:`google.cloud.datastore.key.Key` class, however it is possible that + you might create an Entity with only a partial Key (that is, a Key + with a Kind, and possibly a parent, but without an ID). In such a + case, the datastore service will automatically assign an ID to the + partial key. + + Entities in this API act like dictionaries with extras built in that + allow you to delete or persist the data stored on the entity. + + Entities are mutable and act like a subclass of a dictionary. + This means you could take an existing entity and change the key + to duplicate the object. + + Use :func:`google.cloud.datastore.get` to retrieve an existing entity. + + >>> from google.cloud import datastore + >>> client = datastore.Client() + >>> client.get(key) + + + You can the set values on the entity just like you would on any + other dictionary. + + >>> entity['age'] = 20 + >>> entity['name'] = 'JJ' + >>> entity + + + And you can convert an entity to a regular Python dictionary with the + ``dict`` builtin: + + >>> dict(entity) + {'age': 20, 'name': 'JJ'} + + .. note:: + + When saving an entity to the backend, values which are "text" + (``unicode`` in Python2, ``str`` in Python3) will be saved using + the 'text_value' field, after being encoded to UTF-8. When + retrieved from the back-end, such values will be decoded to "text" + again. Values which are "bytes" (``str`` in Python2, ``bytes`` in + Python3), will be saved using the 'blob_value' field, without + any decoding / encoding step. + + :type key: :class:`google.cloud.datastore.key.Key` + :param key: Optional key to be set on entity. + + :type exclude_from_indexes: tuple of string + :param exclude_from_indexes: Names of fields whose values are not to be + indexed for this entity. 
+ """ + + def __init__(self, key=None, exclude_from_indexes=()): + super(Entity, self).__init__() + self.key = key + self._exclude_from_indexes = set(_ensure_tuple_or_list( + 'exclude_from_indexes', exclude_from_indexes)) + # NOTE: This will be populated when parsing a protobuf in + # google.cloud.datastore.helpers.entity_from_protobuf. + self._meanings = {} + + def __eq__(self, other): + """Compare two entities for equality. + + Entities compare equal if their keys compare equal and their + properties compare equal. + + :rtype: boolean + :returns: True if the entities compare equal, else False. + """ + if not isinstance(other, Entity): + return False + + return (self.key == other.key and + self._exclude_from_indexes == other._exclude_from_indexes and + self._meanings == other._meanings and + super(Entity, self).__eq__(other)) + + def __ne__(self, other): + """Compare two entities for inequality. + + Entities compare equal if their keys compare equal and their + properties compare equal. + + :rtype: boolean + :returns: False if the entities compare equal, else True. + """ + return not self.__eq__(other) + + @property + def kind(self): + """Get the kind of the current entity. + + .. note:: + This relies entirely on the :class:`google.cloud.datastore.key.Key` + set on the entity. That means that we're not storing the kind + of the entity at all, just the properties and a pointer to a + Key which knows its Kind. + """ + if self.key: + return self.key.kind + + @property + def exclude_from_indexes(self): + """Names of fields which are *not* to be indexed for this entity. + + :rtype: sequence of field names + :returns: The set of fields excluded from indexes. + """ + return frozenset(self._exclude_from_indexes) + + def __repr__(self): + if self.key: + return '' % (self.key.path, + super(Entity, self).__repr__()) + else: + return '' % (super(Entity, self).__repr__()) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py new file mode 100644 index 000000000000..6bf6bcb1cb92 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -0,0 +1,472 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper functions for dealing with Cloud Datastore's Protobuf API. + +The non-private functions are part of the API. +""" + +import datetime +import itertools + +from google.protobuf import struct_pb2 +from google.type import latlng_pb2 +import six + +# pylint: disable=ungrouped-imports +from google.cloud._helpers import _datetime_to_pb_timestamp +from google.cloud._helpers import _pb_timestamp_to_datetime +from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 +from google.cloud.datastore.entity import Entity +from google.cloud.datastore.key import Key +# pylint: enable=ungrouped-imports + +__all__ = ('entity_from_protobuf', 'key_from_protobuf') + + +def _get_meaning(value_pb, is_list=False): + """Get the meaning from a protobuf value. 
+
+    :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value`
+    :param value_pb: The protobuf value to be checked for an
+                     associated meaning.
+
+    :type is_list: bool
+    :param is_list: Boolean indicating if the ``value_pb`` contains
+                    a list value.
+
+    :rtype: int
+    :returns: The meaning for the ``value_pb`` if one is set, else
+              :data:`None`. For a list value, if there are disagreeing
+              meanings it just returns a list of meanings. If all the
+              list meanings agree, it just condenses them.
+    """
+    meaning = None
+    if is_list:
+        # An empty list will have no values, hence no shared meaning
+        # set among them.
+        if len(value_pb.array_value.values) == 0:
+            return None
+
+        # We check among all the meanings, some of which may be None,
+        # the rest which may be enum/int values.
+        all_meanings = [_get_meaning(sub_value_pb)
+                        for sub_value_pb in value_pb.array_value.values]
+        unique_meanings = set(all_meanings)
+        if len(unique_meanings) == 1:
+            # If there is a unique meaning, we preserve it.
+            meaning = unique_meanings.pop()
+        else:  # We know len(value_pb.array_value.values) > 0.
+            # If the meaning is not unique, just return all of them.
+            meaning = all_meanings
+    elif value_pb.meaning:  # Simple field (int32).
+        meaning = value_pb.meaning
+
+    return meaning
+
+
+def _new_value_pb(entity_pb, name):
+    """Add (by name) a new ``Value`` protobuf to an entity protobuf.
+
+    :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity`
+    :param entity_pb: An entity protobuf to add a new property to.
+
+    :type name: string
+    :param name: The name of the new property.
+
+    :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Value`
+    :returns: The new ``Value`` protobuf that was added to the entity.
+    """
+    return entity_pb.properties.get_or_create(name)
+
+
+def _property_tuples(entity_pb):
+    """Iterator of name, ``Value`` tuples from entity properties.
+
+    :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity`
+    :param entity_pb: An entity protobuf whose properties are to be
+                      iterated over.
+
+    :rtype: :class:`generator`
+    :returns: An iterator that yields tuples of a name and ``Value``
+              corresponding to properties on the entity.
+    """
+    return six.iteritems(entity_pb.properties)
+
+
+def entity_from_protobuf(pb):
+    """Factory method for creating an entity based on a protobuf.
+
+    The protobuf should be one returned from the Cloud Datastore
+    Protobuf API.
+
+    :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Entity`
+    :param pb: The Protobuf representing the entity.
+
+    :rtype: :class:`google.cloud.datastore.entity.Entity`
+    :returns: The entity derived from the protobuf.
+    """
+    key = None
+    if pb.HasField('key'):  # Message field (Key)
+        key = key_from_protobuf(pb.key)
+
+    entity_props = {}
+    entity_meanings = {}
+    exclude_from_indexes = []
+
+    for prop_name, value_pb in _property_tuples(pb):
+        value = _get_value_from_value_pb(value_pb)
+        entity_props[prop_name] = value
+
+        # Check if the property has an associated meaning.
+        is_list = isinstance(value, list)
+        meaning = _get_meaning(value_pb, is_list=is_list)
+        if meaning is not None:
+            entity_meanings[prop_name] = (meaning, value)
+
+        # Check if ``value_pb`` was excluded from index. Lists need to be
+        # special-cased and we require all ``exclude_from_indexes`` values
+        # in a list agree.
+ if is_list: + exclude_values = set(value_pb.exclude_from_indexes + for value_pb in value_pb.array_value.values) + if len(exclude_values) != 1: + raise ValueError('For an array_value, subvalues must either ' + 'all be indexed or all excluded from ' + 'indexes.') + + if exclude_values.pop(): + exclude_from_indexes.append(prop_name) + else: + if value_pb.exclude_from_indexes: + exclude_from_indexes.append(prop_name) + + entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes) + entity.update(entity_props) + entity._meanings.update(entity_meanings) + return entity + + +def _set_pb_meaning_from_entity(entity, name, value, value_pb, + is_list=False): + """Add meaning information (from an entity) to a protobuf. + + :type entity: :class:`google.cloud.datastore.entity.Entity` + :param entity: The entity to be turned into a protobuf. + + :type name: string + :param name: The name of the property. + + :type value: object + :param value: The current value stored as property ``name``. + + :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :param value_pb: The protobuf value to add meaning / meanings to. + + :type is_list: bool + :param is_list: (Optional) Boolean indicating if the ``value`` is + a list value. + """ + if name not in entity._meanings: + return + + meaning, orig_value = entity._meanings[name] + # Only add the meaning back to the protobuf if the value is + # unchanged from when it was originally read from the API. + if orig_value is not value: + return + + # For lists, we set meaning on each sub-element. + if is_list: + if not isinstance(meaning, list): + meaning = itertools.repeat(meaning) + val_iter = six.moves.zip(value_pb.array_value.values, + meaning) + for sub_value_pb, sub_meaning in val_iter: + if sub_meaning is not None: + sub_value_pb.meaning = sub_meaning + else: + value_pb.meaning = meaning + + +def entity_to_protobuf(entity): + """Converts an entity into a protobuf. + + :type entity: :class:`google.cloud.datastore.entity.Entity` + :param entity: The entity to be turned into a protobuf. + + :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :returns: The protobuf representing the entity. + """ + entity_pb = _entity_pb2.Entity() + if entity.key is not None: + key_pb = entity.key.to_protobuf() + entity_pb.key.CopyFrom(key_pb) + + for name, value in entity.items(): + value_is_list = isinstance(value, list) + if value_is_list and len(value) == 0: + continue + + value_pb = _new_value_pb(entity_pb, name) + # Set the appropriate value. + _set_protobuf_value(value_pb, value) + + # Add index information to protobuf. + if name in entity.exclude_from_indexes: + if not value_is_list: + value_pb.exclude_from_indexes = True + + for sub_value in value_pb.array_value.values: + sub_value.exclude_from_indexes = True + + # Add meaning information to protobuf. + _set_pb_meaning_from_entity(entity, name, value, value_pb, + is_list=value_is_list) + + return entity_pb + + +def key_from_protobuf(pb): + """Factory method for creating a key based on a protobuf. + + The protobuf should be one returned from the Cloud Datastore + Protobuf API. + + :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :param pb: The Protobuf representing the key. 
+
+    :rtype: :class:`google.cloud.datastore.key.Key`
+    :returns: a new `Key` instance
+    """
+    path_args = []
+    for element in pb.path:
+        path_args.append(element.kind)
+        if element.id:  # Simple field (int64)
+            path_args.append(element.id)
+        # This is safe: we expect proto objects returned will only have
+        # one of `name` or `id` set.
+        if element.name:  # Simple field (string)
+            path_args.append(element.name)
+
+    project = None
+    if pb.partition_id.project_id:  # Simple field (string)
+        project = pb.partition_id.project_id
+    namespace = None
+    if pb.partition_id.namespace_id:  # Simple field (string)
+        namespace = pb.partition_id.namespace_id
+
+    return Key(*path_args, namespace=namespace, project=project)
+
+
+def _pb_attr_value(val):
+    """Given a value, return the protobuf attribute name and proper value.
+
+    The Protobuf API uses different attribute names based on value types
+    rather than inferring the type. This function simply determines the
+    proper attribute name based on the type of the value provided and
+    returns the attribute name as well as a properly formatted value.
+
+    Certain value types need to be coerced into a different type (such
+    as a `datetime.datetime` into an integer timestamp, or a
+    `google.cloud.datastore.key.Key` into a Protobuf representation).
+    This function handles that for you.
+
+    .. note::
+       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
+       to 'string_value' in the datastore; values which are "bytes"
+       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
+
+    For example:
+
+    >>> _pb_attr_value(1234)
+    ('integer_value', 1234)
+    >>> _pb_attr_value('my_string')
+    ('string_value', 'my_string')
+
+    :type val: `datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
+               bool, float, integer, string
+    :param val: The value to be scrutinized.
+
+    :rtype: tuple
+    :returns: A tuple of the attribute name and proper value type.
+    """
+
+    if isinstance(val, datetime.datetime):
+        name = 'timestamp'
+        value = _datetime_to_pb_timestamp(val)
+    elif isinstance(val, Key):
+        name, value = 'key', val.to_protobuf()
+    elif isinstance(val, bool):
+        name, value = 'boolean', val
+    elif isinstance(val, float):
+        name, value = 'double', val
+    elif isinstance(val, six.integer_types):
+        name, value = 'integer', val
+    elif isinstance(val, six.text_type):
+        name, value = 'string', val
+    elif isinstance(val, (bytes, str)):
+        name, value = 'blob', val
+    elif isinstance(val, Entity):
+        name, value = 'entity', val
+    elif isinstance(val, list):
+        name, value = 'array', val
+    elif isinstance(val, GeoPoint):
+        name, value = 'geo_point', val.to_protobuf()
+    elif val is None:
+        name, value = 'null', struct_pb2.NULL_VALUE
+    else:
+        raise ValueError("Unknown protobuf attr type %s" % type(val))
+
+    return name + '_value', value
+
+
+def _get_value_from_value_pb(value_pb):
+    """Given a protobuf for a Value, get the correct value.
+
+    The Cloud Datastore Protobuf API returns a Property Protobuf which
+    has one value set and the rest blank. This function retrieves the
+    one value provided.
+
+    Some work is done to coerce the return value into a more useful type
+    (particularly in the case of a timestamp value, or a key value).
+
+    :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value`
+    :param value_pb: The Value Protobuf.
+
+    :rtype: object
+    :returns: The value provided by the Protobuf.
+    :raises: :class:`ValueError <exceptions.ValueError>` if no value type
+             has been set.
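+
+    For example, mirroring the ``_pb_attr_value`` examples above (a hedged
+    sketch; the field is set via the generated protobuf class):
+
+    >>> value_pb = _entity_pb2.Value()
+    >>> value_pb.integer_value = 1234
+    >>> _get_value_from_value_pb(value_pb)
+    1234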
+ """ + value_type = value_pb.WhichOneof('value_type') + + if value_type == 'timestamp_value': + result = _pb_timestamp_to_datetime(value_pb.timestamp_value) + + elif value_type == 'key_value': + result = key_from_protobuf(value_pb.key_value) + + elif value_type == 'boolean_value': + result = value_pb.boolean_value + + elif value_type == 'double_value': + result = value_pb.double_value + + elif value_type == 'integer_value': + result = value_pb.integer_value + + elif value_type == 'string_value': + result = value_pb.string_value + + elif value_type == 'blob_value': + result = value_pb.blob_value + + elif value_type == 'entity_value': + result = entity_from_protobuf(value_pb.entity_value) + + elif value_type == 'array_value': + result = [_get_value_from_value_pb(value) + for value in value_pb.array_value.values] + + elif value_type == 'geo_point_value': + result = GeoPoint(value_pb.geo_point_value.latitude, + value_pb.geo_point_value.longitude) + + elif value_type == 'null_value': + result = None + + else: + raise ValueError('Value protobuf did not have any value set') + + return result + + +def _set_protobuf_value(value_pb, val): + """Assign 'val' to the correct subfield of 'value_pb'. + + The Protobuf API uses different attribute names based on value types + rather than inferring the type. + + Some value types (entities, keys, lists) cannot be directly + assigned; this function handles them correctly. + + :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :param value_pb: The value protobuf to which the value is being assigned. + + :type val: :class:`datetime.datetime`, boolean, float, integer, string, + :class:`google.cloud.datastore.key.Key`, + :class:`google.cloud.datastore.entity.Entity` + :param val: The value to be assigned. + """ + attr, val = _pb_attr_value(val) + if attr == 'key_value': + value_pb.key_value.CopyFrom(val) + elif attr == 'timestamp_value': + value_pb.timestamp_value.CopyFrom(val) + elif attr == 'entity_value': + entity_pb = entity_to_protobuf(val) + value_pb.entity_value.CopyFrom(entity_pb) + elif attr == 'array_value': + l_pb = value_pb.array_value.values + for item in val: + i_pb = l_pb.add() + _set_protobuf_value(i_pb, item) + elif attr == 'geo_point_value': + value_pb.geo_point_value.CopyFrom(val) + else: # scalar, just assign + setattr(value_pb, attr, val) + + +class GeoPoint(object): + """Simple container for a geo point value. + + :type latitude: float + :param latitude: Latitude of a point. + + :type longitude: float + :param longitude: Longitude of a point. + """ + + def __init__(self, latitude, longitude): + self.latitude = latitude + self.longitude = longitude + + def to_protobuf(self): + """Convert the current object to protobuf. + + :rtype: :class:`google.type.latlng_pb2.LatLng`. + :returns: The current point as a protobuf. + """ + return latlng_pb2.LatLng(latitude=self.latitude, + longitude=self.longitude) + + def __eq__(self, other): + """Compare two geo points for equality. + + :rtype: boolean + :returns: True if the points compare equal, else False. + """ + if not isinstance(other, GeoPoint): + return False + + return (self.latitude == other.latitude and + self.longitude == other.longitude) + + def __ne__(self, other): + """Compare two geo points for inequality. + + :rtype: boolean + :returns: False if the points compare equal, else True. 
+ """ + return not self.__eq__(other) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py new file mode 100644 index 000000000000..be6ca3875f16 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -0,0 +1,403 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with Google Cloud Datastore keys.""" + +import copy +import six + +from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 + + +class Key(object): + """An immutable representation of a datastore Key. + + To create a basic key: + + >>> Key('EntityKind', 1234) + + >>> Key('EntityKind', 'foo') + + + To create a key with a parent: + + >>> Key('Parent', 'foo', 'Child', 1234) + + >>> Key('Child', 1234, parent=parent_key) + + + To create a partial key: + + >>> Key('Parent', 'foo', 'Child') + + + :type path_args: tuple of string and integer + :param path_args: May represent a partial (odd length) or full (even + length) key path. + + :type kwargs: dict + :param kwargs: Keyword arguments to be passed in. + + Accepted keyword arguments are + + * namespace (string): A namespace identifier for the key. + * project (string): The project associated with the key. + * parent (:class:`google.cloud.datastore.key.Key`): The parent of the key. + + The project argument is required unless it has been set implicitly. + """ + + def __init__(self, *path_args, **kwargs): + self._flat_path = path_args + parent = self._parent = kwargs.get('parent') + self._namespace = kwargs.get('namespace') + project = kwargs.get('project') + self._project = _validate_project(project, parent) + # _flat_path, _parent, _namespace and _project must be set before + # _combine_args() is called. + self._path = self._combine_args() + + def __eq__(self, other): + """Compare two keys for equality. + + Incomplete keys never compare equal to any other key. + + Completed keys compare equal if they have the same path, project, + and namespace. + + :rtype: bool + :returns: True if the keys compare equal, else False. + """ + if not isinstance(other, Key): + return False + + if self.is_partial or other.is_partial: + return False + + return (self.flat_path == other.flat_path and + self.project == other.project and + self.namespace == other.namespace) + + def __ne__(self, other): + """Compare two keys for inequality. + + Incomplete keys never compare equal to any other key. + + Completed keys compare equal if they have the same path, project, + and namespace. + + :rtype: bool + :returns: False if the keys compare equal, else True. + """ + return not self.__eq__(other) + + def __hash__(self): + """Hash a keys for use in a dictionary lookp. + + :rtype: integer + :returns: a hash of the key's state. + """ + return (hash(self.flat_path) + + hash(self.project) + + hash(self.namespace)) + + @staticmethod + def _parse_path(path_args): + """Parses positional arguments into key path with kinds and IDs. 
+ + :type path_args: tuple + :param path_args: A tuple from positional arguments. Should be + alternating list of kinds (string) and ID/name + parts (int or string). + + :rtype: :class:`list` of :class:`dict` + :returns: A list of key parts with kind and ID or name set. + :raises: :class:`ValueError` if there are no ``path_args``, if one of + the kinds is not a string or if one of the IDs/names is not + a string or an integer. + """ + if len(path_args) == 0: + raise ValueError('Key path must not be empty.') + + kind_list = path_args[::2] + id_or_name_list = path_args[1::2] + # Dummy sentinel value to pad incomplete key to even length path. + partial_ending = object() + if len(path_args) % 2 == 1: + id_or_name_list += (partial_ending,) + + result = [] + for kind, id_or_name in zip(kind_list, id_or_name_list): + curr_key_part = {} + if isinstance(kind, six.string_types): + curr_key_part['kind'] = kind + else: + raise ValueError(kind, 'Kind was not a string.') + + if isinstance(id_or_name, six.string_types): + curr_key_part['name'] = id_or_name + elif isinstance(id_or_name, six.integer_types): + curr_key_part['id'] = id_or_name + elif id_or_name is not partial_ending: + raise ValueError(id_or_name, + 'ID/name was not a string or integer.') + + result.append(curr_key_part) + + return result + + def _combine_args(self): + """Sets protected data by combining raw data set from the constructor. + + If a ``_parent`` is set, updates the ``_flat_path`` and sets the + ``_namespace`` and ``_project`` if not already set. + + :rtype: :class:`list` of :class:`dict` + :returns: A list of key parts with kind and ID or name set. + :raises: :class:`ValueError` if the parent key is not complete. + """ + child_path = self._parse_path(self._flat_path) + + if self._parent is not None: + if self._parent.is_partial: + raise ValueError('Parent key must be complete.') + + # We know that _parent.path() will return a copy. + child_path = self._parent.path + child_path + self._flat_path = self._parent.flat_path + self._flat_path + if (self._namespace is not None and + self._namespace != self._parent.namespace): + raise ValueError('Child namespace must agree with parent\'s.') + self._namespace = self._parent.namespace + if (self._project is not None and + self._project != self._parent.project): + raise ValueError('Child project must agree with parent\'s.') + self._project = self._parent.project + + return child_path + + def _clone(self): + """Duplicates the Key. + + Most attributes are simple types, so don't require copying. Other + attributes like ``parent`` are long-lived and so we re-use them. + + :rtype: :class:`google.cloud.datastore.key.Key` + :returns: A new ``Key`` instance with the same data as the current one. + """ + cloned_self = self.__class__(*self.flat_path, + project=self.project, + namespace=self.namespace) + # If the current parent has already been set, we re-use + # the same instance + cloned_self._parent = self._parent + return cloned_self + + def completed_key(self, id_or_name): + """Creates new key from existing partial key by adding final ID/name. + + :type id_or_name: string or integer + :param id_or_name: ID or name to be added to the key. + + :rtype: :class:`google.cloud.datastore.key.Key` + :returns: A new ``Key`` instance with the same data as the current one + and an extra ID or name added. + :raises: :class:`ValueError` if the current key is not partial or if + ``id_or_name`` is not a string or integer. 
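+
+        For example (the repr shown is illustrative and elides the real
+        project):
+
+        >>> key = Key('Parent', 'foo', 'Child', project='my-project')
+        >>> key.completed_key(1234)
+        <Key [{'kind': 'Parent', 'name': 'foo'},
+              {'kind': 'Child', 'id': 1234}], project=my-project>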
+ """ + if not self.is_partial: + raise ValueError('Only a partial key can be completed.') + + if isinstance(id_or_name, six.string_types): + id_or_name_key = 'name' + elif isinstance(id_or_name, six.integer_types): + id_or_name_key = 'id' + else: + raise ValueError(id_or_name, + 'ID/name was not a string or integer.') + + new_key = self._clone() + new_key._path[-1][id_or_name_key] = id_or_name + new_key._flat_path += (id_or_name,) + return new_key + + def to_protobuf(self): + """Return a protobuf corresponding to the key. + + :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :returns: The protobuf representing the key. + """ + key = _entity_pb2.Key() + key.partition_id.project_id = self.project + + if self.namespace: + key.partition_id.namespace_id = self.namespace + + for item in self.path: + element = key.path.add() + if 'kind' in item: + element.kind = item['kind'] + if 'id' in item: + element.id = item['id'] + if 'name' in item: + element.name = item['name'] + + return key + + @property + def is_partial(self): + """Boolean indicating if the key has an ID (or name). + + :rtype: bool + :returns: ``True`` if the last element of the key's path does not have + an ``id`` or a ``name``. + """ + return self.id_or_name is None + + @property + def namespace(self): + """Namespace getter. + + :rtype: string + :returns: The namespace of the current key. + """ + return self._namespace + + @property + def path(self): + """Path getter. + + Returns a copy so that the key remains immutable. + + :rtype: :class:`list` of :class:`dict` + :returns: The (key) path of the current key. + """ + return copy.deepcopy(self._path) + + @property + def flat_path(self): + """Getter for the key path as a tuple. + + :rtype: tuple of string and integer + :returns: The tuple of elements in the path. + """ + return self._flat_path + + @property + def kind(self): + """Kind getter. Based on the last element of path. + + :rtype: string + :returns: The kind of the current key. + """ + return self.path[-1]['kind'] + + @property + def id(self): + """ID getter. Based on the last element of path. + + :rtype: integer + :returns: The (integer) ID of the key. + """ + return self.path[-1].get('id') + + @property + def name(self): + """Name getter. Based on the last element of path. + + :rtype: string + :returns: The (string) name of the key. + """ + return self.path[-1].get('name') + + @property + def id_or_name(self): + """Getter. Based on the last element of path. + + :rtype: integer (if ``id``) or string (if ``name``) + :returns: The last element of the key's path if it is either an ``id`` + or a ``name``. + """ + return self.id or self.name + + @property + def project(self): + """Project getter. + + :rtype: string + :returns: The key's project. + """ + return self._project + + def _make_parent(self): + """Creates a parent key for the current path. + + Extracts all but the last element in the key path and creates a new + key, while still matching the namespace and the project. + + :rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType` + :returns: A new ``Key`` instance, whose path consists of all but the + last element of current path. If the current key has only + one path element, returns ``None``. + """ + if self.is_partial: + parent_args = self.flat_path[:-1] + else: + parent_args = self.flat_path[:-2] + if parent_args: + return self.__class__(*parent_args, project=self.project, + namespace=self.namespace) + + @property + def parent(self): + """The parent of the current key. 
+
+        :rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
+        :returns: A new ``Key`` instance, whose path consists of all but the
+                  last element of current path. If the current key has only
+                  one path element, returns ``None``.
+        """
+        if self._parent is None:
+            self._parent = self._make_parent()
+
+        return self._parent
+
+    def __repr__(self):
+        return '<Key %s, project=%s>' % (self.path, self.project)
+
+
+def _validate_project(project, parent):
+    """Ensure the project is set appropriately.
+
+    If ``parent`` is passed, skip the test (it will be checked / fixed up
+    later).
+
+    If ``project`` is unset, attempt to infer the project from the
+    environment.
+
+    :type project: string
+    :param project: A project.
+
+    :type parent: :class:`google.cloud.datastore.key.Key` or ``NoneType``
+    :param parent: The parent of the key or ``None``.
+
+    :rtype: string
+    :returns: The ``project`` passed in, or implied from the environment.
+    :raises: :class:`ValueError` if ``project`` is ``None`` and no project
+             can be inferred from the parent.
+    """
+    if parent is None:
+        if project is None:
+            raise ValueError("A Key must have a project set.")
+
+    return project
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py
new file mode 100644
index 000000000000..954a320116a5
--- /dev/null
+++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py
@@ -0,0 +1,539 @@
+# Copyright 2014 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Google Cloud Datastore queries."""
+
+import base64
+
+from google.cloud._helpers import _ensure_tuple_or_list
+from google.cloud.datastore._generated import query_pb2 as _query_pb2
+from google.cloud.datastore import helpers
+from google.cloud.datastore.key import Key
+
+
+class Query(object):
+    """A Query against the Cloud Datastore.
+
+    This class serves as an abstraction for creating a query over data
+    stored in the Cloud Datastore.
+
+    :type client: :class:`google.cloud.datastore.client.Client`
+    :param client: The client used to connect to Datastore.
+
+    :type kind: string
+    :param kind: The kind to query.
+
+    :type project: string
+    :param project: The project associated with the query. If not passed,
+                    uses the client's value.
+
+    :type namespace: string or None
+    :param namespace: The namespace to which to restrict results. If not
+                      passed, uses the client's value.
+
+    :type ancestor: :class:`google.cloud.datastore.key.Key` or None
+    :param ancestor: key of the ancestor to which this query's results are
+                     restricted.
+
+    :type filters: sequence of (property_name, operator, value) tuples
+    :param filters: property filters applied by this query.
+
+    :type projection: sequence of string
+    :param projection: fields returned as part of query results.
+
+    :type order: sequence of string
+    :param order: field names used to order query results. Prepend '-'
+                  to a field name to sort it in descending order.
+ + :type distinct_on: sequence of string + :param distinct_on: field names used to group query results. + + :raises: ValueError if ``project`` is not passed and no implicit + default is set. + """ + + OPERATORS = { + '<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL, + '>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL, + '<': _query_pb2.PropertyFilter.LESS_THAN, + '>': _query_pb2.PropertyFilter.GREATER_THAN, + '=': _query_pb2.PropertyFilter.EQUAL, + } + """Mapping of operator strings and their protobuf equivalents.""" + + def __init__(self, + client, + kind=None, + project=None, + namespace=None, + ancestor=None, + filters=(), + projection=(), + order=(), + distinct_on=()): + + self._client = client + self._kind = kind + self._project = project or client.project + self._namespace = namespace or client.namespace + self._ancestor = ancestor + self._filters = [] + # Verify filters passed in. + for property_name, operator, value in filters: + self.add_filter(property_name, operator, value) + self._projection = _ensure_tuple_or_list('projection', projection) + self._order = _ensure_tuple_or_list('order', order) + self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on) + + @property + def project(self): + """Get the project for this Query. + + :rtype: str + :returns: The project for the query. + """ + return self._project or self._client.project + + @property + def namespace(self): + """This query's namespace + + :rtype: string or None + :returns: the namespace assigned to this query + """ + return self._namespace or self._client.namespace + + @namespace.setter + def namespace(self, value): + """Update the query's namespace. + + :type value: string + """ + if not isinstance(value, str): + raise ValueError("Namespace must be a string") + self._namespace = value + + @property + def kind(self): + """Get the Kind of the Query. + + :rtype: string + :returns: The kind for the query. + """ + return self._kind + + @kind.setter + def kind(self, value): + """Update the Kind of the Query. + + :type value: string + :param value: updated kind for the query. + + .. note:: + + The protobuf specification allows for ``kind`` to be repeated, + but the current implementation returns an error if more than + one value is passed. If the back-end changes in the future to + allow multiple values, this method will be updated to allow passing + either a string or a sequence of strings. + """ + if not isinstance(value, str): + raise TypeError("Kind must be a string") + self._kind = value + + @property + def ancestor(self): + """The ancestor key for the query. + + :rtype: Key or None + :returns: The ancestor for the query. + """ + return self._ancestor + + @ancestor.setter + def ancestor(self, value): + """Set the ancestor for the query + + :type value: Key + :param value: the new ancestor key + """ + if not isinstance(value, Key): + raise TypeError("Ancestor must be a Key") + self._ancestor = value + + @ancestor.deleter + def ancestor(self): + """Remove the ancestor for the query.""" + self._ancestor = None + + @property + def filters(self): + """Filters set on the query. + + :rtype: sequence of (property_name, operator, value) tuples. + :returns: The filters set on the query. + """ + return self._filters[:] + + def add_filter(self, property_name, operator, value): + """Filter the query based on a property name, operator and a value. 
+
+        Expressions take the form of::
+
+          .add_filter('<property>', '<operator>', <value>)
+
+        where property is a property stored on the entity in the datastore
+        and operator is one of ``OPERATORS``
+        (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::
+
+          >>> from google.cloud import datastore
+          >>> client = datastore.Client()
+          >>> query = client.query(kind='Person')
+          >>> query.add_filter('name', '=', 'James')
+          >>> query.add_filter('age', '>', 50)
+
+        :type property_name: string
+        :param property_name: A property name.
+
+        :type operator: string
+        :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
+
+        :type value: :class:`int`, :class:`str`, :class:`bool`,
+                     :class:`float`, :class:`NoneType`,
+                     :class:`datetime.datetime`,
+                     :class:`google.cloud.datastore.key.Key`
+        :param value: The value to filter on.
+
+        :raises: :class:`ValueError` if ``operator`` is not one of the
+                 specified values, or if a filter names ``'__key__'`` but
+                 passes an invalid value (a key is required).
+        """
+        if self.OPERATORS.get(operator) is None:
+            error_message = 'Invalid expression: "%s"' % (operator,)
+            choices_message = 'Please use one of: =, <, <=, >, >=.'
+            raise ValueError(error_message, choices_message)
+
+        if property_name == '__key__' and not isinstance(value, Key):
+            raise ValueError('Invalid key: "%s"' % value)
+
+        self._filters.append((property_name, operator, value))
+
+    @property
+    def projection(self):
+        """Field names returned by the query.
+
+        :rtype: sequence of string
+        :returns: Names of fields in query results.
+        """
+        return self._projection[:]
+
+    @projection.setter
+    def projection(self, projection):
+        """Set the fields returned by the query.
+
+        :type projection: string or sequence of strings
+        :param projection: Each value is a string giving the name of a
+                           property to be included in the projection query.
+        """
+        if isinstance(projection, str):
+            projection = [projection]
+        self._projection[:] = projection
+
+    def keys_only(self):
+        """Set the projection to include only keys."""
+        self._projection[:] = ['__key__']
+
+    def key_filter(self, key, operator='='):
+        """Filter on a key.
+
+        :type key: :class:`google.cloud.datastore.key.Key`
+        :param key: The key to filter on.
+
+        :type operator: string
+        :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
+                         Defaults to ``=``.
+        """
+        self.add_filter('__key__', operator, key)
+
+    @property
+    def order(self):
+        """Names of fields used to sort query results.
+
+        :rtype: sequence of string
+        :returns: The order(s) set on the query.
+        """
+        return self._order[:]
+
+    @order.setter
+    def order(self, value):
+        """Set the fields used to sort query results.
+
+        Sort fields will be applied in the order specified.
+
+        :type value: string or sequence of strings
+        :param value: Each value is a string giving the name of the
+                      property on which to sort, optionally preceded by a
+                      hyphen (-) to specify descending order.
+                      Omitting the hyphen implies ascending order.
+        """
+        if isinstance(value, str):
+            value = [value]
+        self._order[:] = value
+
+    @property
+    def distinct_on(self):
+        """Names of fields used to group query results.
+
+        :rtype: sequence of string
+        :returns: The "distinct on" fields set on the query.
+        """
+        return self._distinct_on[:]
+
+    @distinct_on.setter
+    def distinct_on(self, value):
+        """Set fields used to group query results.
+
+        :type value: string or sequence of strings
+        :param value: Each value is a string giving the name of a
+                      property to use to group results together.
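+
+        For example (a hedged sketch)::
+
+            query.distinct_on = ['category', 'priority']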
+        """
+        if isinstance(value, str):
+            value = [value]
+        self._distinct_on[:] = value
+
+    def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
+              client=None):
+        """Execute the Query; return an iterator for the matching entities.
+
+        For example::
+
+          >>> from google.cloud import datastore
+          >>> client = datastore.Client()
+          >>> query = client.query(kind='Person')
+          >>> query.add_filter('name', '=', 'Sally')
+          >>> list(query.fetch())
+          [<Entity object>, <Entity object>, ...]
+          >>> list(query.fetch(1))
+          [<Entity object>]
+
+        :type limit: integer or None
+        :param limit: An optional limit passed through to the iterator.
+
+        :type offset: integer
+        :param offset: An optional offset passed through to the iterator.
+
+        :type start_cursor: bytes
+        :param start_cursor: An optional cursor passed through to the iterator.
+
+        :type end_cursor: bytes
+        :param end_cursor: An optional cursor passed through to the iterator.
+
+        :type client: :class:`google.cloud.datastore.client.Client`
+        :param client: client used to connect to datastore.
+                       If not supplied, uses the query's value.
+
+        :rtype: :class:`Iterator`
+        :returns: The iterator for the query.
+        :raises: ValueError if ``client`` is not passed and no implicit
+                 default has been set.
+        """
+        if client is None:
+            client = self._client
+
+        return Iterator(
+            self, client, limit, offset, start_cursor, end_cursor)
+
+
+class Iterator(object):
+    """Represent the state of a given execution of a Query.
+
+    :type query: :class:`google.cloud.datastore.query.Query`
+    :param query: Query object holding permanent configuration (i.e.
+                  things that don't change with each page in
+                  a results set).
+
+    :type client: :class:`google.cloud.datastore.client.Client`
+    :param client: The client used to make a request.
+
+    :type limit: integer
+    :param limit: (Optional) Limit the number of results returned.
+
+    :type offset: integer
+    :param offset: (Optional) Offset used to begin a query.
+
+    :type start_cursor: bytes
+    :param start_cursor: (Optional) Cursor to begin paging through
+                         query results.
+
+    :type end_cursor: bytes
+    :param end_cursor: (Optional) Cursor to end paging through
+                       query results.
+    """
+
+    _NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED
+
+    _FINISHED = (
+        _query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
+        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR,
+    )
+
+    def __init__(self, query, client, limit=None, offset=None,
+                 start_cursor=None, end_cursor=None):
+        self._query = query
+        self._client = client
+        self._limit = limit
+        self._offset = offset
+        self._start_cursor = start_cursor
+        self._end_cursor = end_cursor
+        self._page = self._more_results = None
+        self._skipped_results = None
+
+    def next_page(self):
+        """Fetch a single "page" of query results.
+
+        Low-level API for fine control: the more convenient API is
+        to iterate on the current Iterator.
+
+        :rtype: tuple, (entities, more_results, cursor)
+        :returns: The next page of results.
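+
+        Cursors are consumed and produced as URL-safe base64-encoded
+        bytes.  The iterator's start cursor advances after each call, so
+        repeated calls walk the result set page by page; a sketch
+        (entity contents elided)::
+
+            >>> iterator = query.fetch()
+            >>> entities, more_results, cursor = iterator.next_page()
+            >>> while more_results:
+            ...     entities, more_results, cursor = iterator.next_page()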
+ """ + pb = _pb_from_query(self._query) + + start_cursor = self._start_cursor + if start_cursor is not None: + pb.start_cursor = base64.urlsafe_b64decode(start_cursor) + + end_cursor = self._end_cursor + if end_cursor is not None: + pb.end_cursor = base64.urlsafe_b64decode(end_cursor) + + if self._limit is not None: + pb.limit.value = self._limit + + if self._offset is not None: + pb.offset = self._offset + + transaction = self._client.current_transaction + + query_results = self._client.connection.run_query( + query_pb=pb, + project=self._query.project, + namespace=self._query.namespace, + transaction_id=transaction and transaction.id, + ) + (entity_pbs, cursor_as_bytes, + more_results_enum, self._skipped_results) = query_results + + if cursor_as_bytes == b'': + self._start_cursor = None + else: + self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes) + self._end_cursor = None + + if more_results_enum == self._NOT_FINISHED: + self._more_results = True + elif more_results_enum in self._FINISHED: + self._more_results = False + else: + raise ValueError('Unexpected value returned for `more_results`.') + + self._page = [ + helpers.entity_from_protobuf(entity) + for entity in entity_pbs] + return self._page, self._more_results, self._start_cursor + + def __iter__(self): + """Generator yielding all results matching our query. + + :rtype: sequence of :class:`google.cloud.datastore.entity.Entity` + """ + while True: + self.next_page() + for entity in self._page: + yield entity + if not self._more_results: + break + num_results = len(self._page) + if self._limit is not None: + self._limit -= num_results + if self._offset is not None and self._skipped_results is not None: + # NOTE: The offset goes down relative to the location + # because we are updating the cursor each time. + self._offset -= self._skipped_results + + +def _pb_from_query(query): + """Convert a Query instance to the corresponding protobuf. + + :type query: :class:`Query` + :param query: The source query. + + :rtype: :class:`google.cloud.datastore._generated.query_pb2.Query` + :returns: A protobuf that can be sent to the protobuf API. N.b. that + it does not contain "in-flight" fields for ongoing query + executions (cursors, offset, limit). + """ + pb = _query_pb2.Query() + + for projection_name in query.projection: + pb.projection.add().property.name = projection_name + + if query.kind: + pb.kind.add().name = query.kind + + composite_filter = pb.filter.composite_filter + composite_filter.op = _query_pb2.CompositeFilter.AND + + if query.ancestor: + ancestor_pb = query.ancestor.to_protobuf() + + # Filter on __key__ HAS_ANCESTOR == ancestor. + ancestor_filter = composite_filter.filters.add().property_filter + ancestor_filter.property.name = '__key__' + ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR + ancestor_filter.value.key_value.CopyFrom(ancestor_pb) + + for property_name, operator, value in query.filters: + pb_op_enum = query.OPERATORS.get(operator) + + # Add the specific filter + property_filter = composite_filter.filters.add().property_filter + property_filter.property.name = property_name + property_filter.op = pb_op_enum + + # Set the value to filter on based on the type. 
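+        # (A ``__key__`` filter carries the key as a protobuf in
+        # ``key_value``; other native Python values are converted by
+        # ``helpers._set_protobuf_value``.)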
+        if property_name == '__key__':
+            key_pb = value.to_protobuf()
+            property_filter.value.key_value.CopyFrom(key_pb)
+        else:
+            helpers._set_protobuf_value(property_filter.value, value)
+
+    if not composite_filter.filters:
+        pb.ClearField('filter')
+
+    for prop in query.order:
+        property_order = pb.order.add()
+
+        if prop.startswith('-'):
+            property_order.property.name = prop[1:]
+            property_order.direction = property_order.DESCENDING
+        else:
+            property_order.property.name = prop
+            property_order.direction = property_order.ASCENDING
+
+    for distinct_on_name in query.distinct_on:
+        pb.distinct_on.add().name = distinct_on_name
+
+    return pb
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
new file mode 100644
index 000000000000..700068dfe7cd
--- /dev/null
+++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
@@ -0,0 +1,170 @@
+# Copyright 2014 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Google Cloud Datastore transactions."""
+
+from google.cloud.datastore.batch import Batch
+
+
+class Transaction(Batch):
+    """An abstraction representing datastore Transactions.
+
+    Transactions can be used to build up a bulk mutation and ensure all
+    or none succeed (transactionally).
+
+    For example, the following snippet of code will put the two ``save``
+    operations (either ``insert`` or ``upsert``) into the same
+    mutation, and execute those within a transaction::
+
+      >>> from google.cloud import datastore
+      >>> client = datastore.Client()
+      >>> with client.transaction():
+      ...     client.put_multi([entity1, entity2])
+
+    Because it derives from :class:`Batch <.datastore.batch.Batch>`,
+    :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods::
+
+      >>> with client.transaction() as xact:
+      ...     xact.put(entity1)
+      ...     xact.delete(entity2.key)
+
+    By default, the transaction is rolled back if the transaction block
+    exits with an error::
+
+      >>> with client.transaction():
+      ...     do_some_work()
+      ...     raise SomeException()  # rolls back
+
+    If the transaction block exits without an exception, it will commit
+    by default.
+
+    .. warning:: Inside a transaction, automatically assigned IDs for
+       entities will not be available at save time!  That means, if you
+       try::
+
+         >>> with client.transaction():
+         ...     entity = datastore.Entity(key=client.key('Thing'))
+         ...     client.put(entity)
+
+       ``entity`` won't have a complete key until the transaction is
+       committed.
+
+       Once you exit the transaction (or call :meth:`commit`), the
+       automatically generated ID will be assigned to the entity::
+
+         >>> with client.transaction():
+         ...     entity = datastore.Entity(key=client.key('Thing'))
+         ...     client.put(entity)
+         ...     print(entity.key.is_partial)  # There is no ID on this key.
+         ...
+         True
+         >>> print(entity.key.is_partial)  # There *is* an ID.
+         False
+
+    If you don't want to use the context manager, you can initialize a
+    transaction manually::
+
+      >>> transaction = client.transaction()
+      >>> transaction.begin()
+      >>>
+      >>> entity = datastore.Entity(key=client.key('Thing'))
+      >>> transaction.put(entity)
+      >>>
+      >>> if error:
+      ...     transaction.rollback()
+      ... else:
+      ...     transaction.commit()
+
+    :type client: :class:`google.cloud.datastore.client.Client`
+    :param client: the client used to connect to datastore.
+    """
+
+    _status = None
+
+    def __init__(self, client):
+        super(Transaction, self).__init__(client)
+        self._id = None
+
+    @property
+    def id(self):
+        """Getter for the transaction ID.
+
+        :rtype: string
+        :returns: The ID of the current transaction.
+        """
+        return self._id
+
+    def current(self):
+        """Return the topmost transaction.
+
+        .. note::
+
+            If the topmost element on the stack is not a transaction,
+            returns None.
+
+        :rtype: :class:`google.cloud.datastore.transaction.Transaction` or None
+        :returns: The current transaction (if any are active).
+        """
+        top = super(Transaction, self).current()
+        if isinstance(top, Transaction):
+            return top
+
+    def begin(self):
+        """Begins a transaction.
+
+        This method is called automatically when entering a with
+        statement; however, it can be called explicitly if you don't
+        want to use a context manager.
+
+        :raises: :class:`~exceptions.ValueError` if the transaction has
+                 already begun.
+        """
+        super(Transaction, self).begin()
+        try:
+            self._id = self.connection.begin_transaction(self.project)
+        except:
+            self._status = self._ABORTED
+            raise
+
+    def rollback(self):
+        """Rolls back the current transaction.
+
+        This method has necessary side-effects:
+
+        - Sets the current connection's transaction reference to None.
+        - Sets the current transaction's ID to None.
+        """
+        try:
+            self.connection.rollback(self.project, self._id)
+        finally:
+            super(Transaction, self).rollback()
+            # Clear our own ID in case this gets accidentally reused.
+            self._id = None
+
+    def commit(self):
+        """Commits the transaction.
+
+        This is called automatically upon exiting a with statement;
+        however, it can be called explicitly if you don't want to use
+        a context manager.
+
+        This method has necessary side-effects:
+
+        - Sets the current transaction's ID to None.
+        """
+        try:
+            super(Transaction, self).commit()
+        finally:
+            # Clear our own ID in case this gets accidentally reused.
+            self._id = None
diff --git a/packages/google-cloud-datastore/unit_tests/__init__.py b/packages/google-cloud-datastore/unit_tests/__init__.py
new file mode 100644
index 000000000000..58e0d9153632
--- /dev/null
+++ b/packages/google-cloud-datastore/unit_tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
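
Note: ``Iterator.next_page`` above consults ``client.current_transaction``,
so a query executed inside a transaction block is sent with that
transaction's ID and reads at its snapshot. A minimal sketch (the kind name
is illustrative)::

    >>> from google.cloud import datastore
    >>> client = datastore.Client()
    >>> with client.transaction():
    ...     # run_query is issued with the active transaction's ID.
    ...     entities = list(client.query(kind='Config').fetch())
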
diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py new file mode 100644 index 000000000000..e7ce9dd609b0 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -0,0 +1,471 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestBatch(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.batch import Batch + + return Batch + + def _makeOne(self, client): + return self._getTargetClass()(client) + + def test_ctor(self): + from google.cloud.datastore._generated import datastore_pb2 + _PROJECT = 'PROJECT' + _NAMESPACE = 'NAMESPACE' + connection = _Connection() + client = _Client(_PROJECT, connection, _NAMESPACE) + batch = self._makeOne(client) + + self.assertEqual(batch.project, _PROJECT) + self.assertEqual(batch.connection, connection) + self.assertEqual(batch.namespace, _NAMESPACE) + self.assertIsNone(batch._id) + self.assertEqual(batch._status, batch._INITIAL) + self.assertIsInstance(batch._commit_request, + datastore_pb2.CommitRequest) + self.assertIs(batch.mutations, batch._commit_request.mutations) + self.assertEqual(batch._partial_key_entities, []) + + def test_current(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch1 = self._makeOne(client) + batch2 = self._makeOne(client) + self.assertIsNone(batch1.current()) + self.assertIsNone(batch2.current()) + with batch1: + self.assertIs(batch1.current(), batch1) + self.assertIs(batch2.current(), batch1) + with batch2: + self.assertIs(batch1.current(), batch2) + self.assertIs(batch2.current(), batch2) + self.assertIs(batch1.current(), batch1) + self.assertIs(batch2.current(), batch1) + self.assertIsNone(batch1.current()) + self.assertIsNone(batch2.current()) + + def test_put_entity_wo_key(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + + batch.begin() + self.assertRaises(ValueError, batch.put, _Entity()) + + def test_put_entity_wrong_status(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + entity = _Entity() + entity.key = _Key('OTHER') + + self.assertEqual(batch._status, batch._INITIAL) + self.assertRaises(ValueError, batch.put, entity) + + def test_put_entity_w_key_wrong_project(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + entity = _Entity() + entity.key = _Key('OTHER') + + batch.begin() + self.assertRaises(ValueError, batch.put, entity) + + def test_put_entity_w_partial_key(self): + _PROJECT = 'PROJECT' + _PROPERTIES = {'foo': 'bar'} + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + entity = _Entity(_PROPERTIES) + key = entity.key = _Key(_PROJECT) + key._id = None + + batch.begin() + 
batch.put(entity) + + mutated_entity = _mutated_pb(self, batch.mutations, 'insert') + self.assertEqual(mutated_entity.key, key._key) + self.assertEqual(batch._partial_key_entities, [entity]) + + def test_put_entity_w_completed_key(self): + from google.cloud.datastore.helpers import _property_tuples + + _PROJECT = 'PROJECT' + _PROPERTIES = { + 'foo': 'bar', + 'baz': 'qux', + 'spam': [1, 2, 3], + 'frotz': [], # will be ignored + } + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + entity = _Entity(_PROPERTIES) + entity.exclude_from_indexes = ('baz', 'spam') + key = entity.key = _Key(_PROJECT) + + batch.begin() + batch.put(entity) + + mutated_entity = _mutated_pb(self, batch.mutations, 'upsert') + self.assertEqual(mutated_entity.key, key._key) + + prop_dict = dict(_property_tuples(mutated_entity)) + self.assertEqual(len(prop_dict), 3) + self.assertFalse(prop_dict['foo'].exclude_from_indexes) + self.assertTrue(prop_dict['baz'].exclude_from_indexes) + self.assertFalse(prop_dict['spam'].exclude_from_indexes) + spam_values = prop_dict['spam'].array_value.values + self.assertTrue(spam_values[0].exclude_from_indexes) + self.assertTrue(spam_values[1].exclude_from_indexes) + self.assertTrue(spam_values[2].exclude_from_indexes) + self.assertFalse('frotz' in prop_dict) + + def test_delete_wrong_status(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + key = _Key(_PROJECT) + key._id = None + + self.assertEqual(batch._status, batch._INITIAL) + self.assertRaises(ValueError, batch.delete, key) + + def test_delete_w_partial_key(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + key = _Key(_PROJECT) + key._id = None + + batch.begin() + self.assertRaises(ValueError, batch.delete, key) + + def test_delete_w_key_wrong_project(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + key = _Key('OTHER') + + batch.begin() + self.assertRaises(ValueError, batch.delete, key) + + def test_delete_w_completed_key(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + key = _Key(_PROJECT) + + batch.begin() + batch.delete(key) + + mutated_key = _mutated_pb(self, batch.mutations, 'delete') + self.assertEqual(mutated_key, key._key) + + def test_begin(self): + _PROJECT = 'PROJECT' + client = _Client(_PROJECT, None) + batch = self._makeOne(client) + self.assertEqual(batch._status, batch._INITIAL) + batch.begin() + self.assertEqual(batch._status, batch._IN_PROGRESS) + + def test_begin_fail(self): + _PROJECT = 'PROJECT' + client = _Client(_PROJECT, None) + batch = self._makeOne(client) + batch._status = batch._IN_PROGRESS + with self.assertRaises(ValueError): + batch.begin() + + def test_rollback(self): + _PROJECT = 'PROJECT' + client = _Client(_PROJECT, None) + batch = self._makeOne(client) + batch.begin() + self.assertEqual(batch._status, batch._IN_PROGRESS) + batch.rollback() + self.assertEqual(batch._status, batch._ABORTED) + + def test_rollback_wrong_status(self): + _PROJECT = 'PROJECT' + client = _Client(_PROJECT, None) + batch = self._makeOne(client) + + self.assertEqual(batch._status, batch._INITIAL) + self.assertRaises(ValueError, batch.rollback) + + def test_commit(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = 
_Client(_PROJECT, connection) + batch = self._makeOne(client) + + self.assertEqual(batch._status, batch._INITIAL) + batch.begin() + self.assertEqual(batch._status, batch._IN_PROGRESS) + batch.commit() + self.assertEqual(batch._status, batch._FINISHED) + + self.assertEqual(connection._committed, + [(_PROJECT, batch._commit_request, None)]) + + def test_commit_wrong_status(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + + self.assertEqual(batch._status, batch._INITIAL) + self.assertRaises(ValueError, batch.commit) + + def test_commit_w_partial_key_entities(self): + _PROJECT = 'PROJECT' + _NEW_ID = 1234 + connection = _Connection(_NEW_ID) + client = _Client(_PROJECT, connection) + batch = self._makeOne(client) + entity = _Entity({}) + key = entity.key = _Key(_PROJECT) + key._id = None + batch._partial_key_entities.append(entity) + + self.assertEqual(batch._status, batch._INITIAL) + batch.begin() + self.assertEqual(batch._status, batch._IN_PROGRESS) + batch.commit() + self.assertEqual(batch._status, batch._FINISHED) + + self.assertEqual(connection._committed, + [(_PROJECT, batch._commit_request, None)]) + self.assertFalse(entity.key.is_partial) + self.assertEqual(entity.key._id, _NEW_ID) + + def test_as_context_mgr_wo_error(self): + _PROJECT = 'PROJECT' + _PROPERTIES = {'foo': 'bar'} + connection = _Connection() + entity = _Entity(_PROPERTIES) + key = entity.key = _Key(_PROJECT) + + client = _Client(_PROJECT, connection) + self.assertEqual(list(client._batches), []) + + with self._makeOne(client) as batch: + self.assertEqual(list(client._batches), [batch]) + batch.put(entity) + + self.assertEqual(list(client._batches), []) + + mutated_entity = _mutated_pb(self, batch.mutations, 'upsert') + self.assertEqual(mutated_entity.key, key._key) + self.assertEqual(connection._committed, + [(_PROJECT, batch._commit_request, None)]) + + def test_as_context_mgr_nested(self): + _PROJECT = 'PROJECT' + _PROPERTIES = {'foo': 'bar'} + connection = _Connection() + entity1 = _Entity(_PROPERTIES) + key1 = entity1.key = _Key(_PROJECT) + entity2 = _Entity(_PROPERTIES) + key2 = entity2.key = _Key(_PROJECT) + + client = _Client(_PROJECT, connection) + self.assertEqual(list(client._batches), []) + + with self._makeOne(client) as batch1: + self.assertEqual(list(client._batches), [batch1]) + batch1.put(entity1) + with self._makeOne(client) as batch2: + self.assertEqual(list(client._batches), [batch2, batch1]) + batch2.put(entity2) + + self.assertEqual(list(client._batches), [batch1]) + + self.assertEqual(list(client._batches), []) + + mutated_entity1 = _mutated_pb(self, batch1.mutations, 'upsert') + self.assertEqual(mutated_entity1.key, key1._key) + + mutated_entity2 = _mutated_pb(self, batch2.mutations, 'upsert') + self.assertEqual(mutated_entity2.key, key2._key) + + self.assertEqual(connection._committed, + [(_PROJECT, batch2._commit_request, None), + (_PROJECT, batch1._commit_request, None)]) + + def test_as_context_mgr_w_error(self): + _PROJECT = 'PROJECT' + _PROPERTIES = {'foo': 'bar'} + connection = _Connection() + entity = _Entity(_PROPERTIES) + key = entity.key = _Key(_PROJECT) + + client = _Client(_PROJECT, connection) + self.assertEqual(list(client._batches), []) + + try: + with self._makeOne(client) as batch: + self.assertEqual(list(client._batches), [batch]) + batch.put(entity) + raise ValueError("testing") + except ValueError: + pass + + self.assertEqual(list(client._batches), []) + + mutated_entity = _mutated_pb(self, 
batch.mutations, 'upsert') + self.assertEqual(mutated_entity.key, key._key) + self.assertEqual(connection._committed, []) + + def test_as_context_mgr_enter_fails(self): + klass = self._getTargetClass() + + class FailedBegin(klass): + + def begin(self): + raise RuntimeError + + client = _Client(None, None) + self.assertEqual(client._batches, []) + + batch = FailedBegin(client) + with self.assertRaises(RuntimeError): + # The context manager will never be entered because + # of the failure. + with batch: # pragma: NO COVER + pass + # Make sure no batch was added. + self.assertEqual(client._batches, []) + + +class _PathElementPB(object): + + def __init__(self, id_): + self.id = id_ + + +class _KeyPB(object): + + def __init__(self, id_): + self.path = [_PathElementPB(id_)] + + +class _Connection(object): + _marker = object() + _save_result = (False, None) + + def __init__(self, *new_keys): + self._completed_keys = [_KeyPB(key) for key in new_keys] + self._committed = [] + self._index_updates = 0 + + def commit(self, project, commit_request, transaction_id): + self._committed.append((project, commit_request, transaction_id)) + return self._index_updates, self._completed_keys + + +class _Entity(dict): + key = None + exclude_from_indexes = () + _meanings = {} + + +class _Key(object): + _MARKER = object() + _kind = 'KIND' + _key = 'KEY' + _path = None + _id = 1234 + _stored = None + + def __init__(self, project): + self.project = project + + @property + def is_partial(self): + return self._id is None + + def to_protobuf(self): + from google.cloud.datastore._generated import entity_pb2 + key = self._key = entity_pb2.Key() + # Don't assign it, because it will just get ripped out + # key.partition_id.project_id = self.project + + element = key.path.add() + element.kind = self._kind + if self._id is not None: + element.id = self._id + + return key + + def completed_key(self, new_id): + assert self.is_partial + new_key = self.__class__(self.project) + new_key._id = new_id + return new_key + + +class _Client(object): + + def __init__(self, project, connection, namespace=None): + self.project = project + self.connection = connection + self.namespace = namespace + self._batches = [] + + def _push_batch(self, batch): + self._batches.insert(0, batch) + + def _pop_batch(self): + return self._batches.pop(0) + + @property + def current_batch(self): + if self._batches: + return self._batches[0] + + +def _assert_num_mutations(test_case, mutation_pb_list, num_mutations): + test_case.assertEqual(len(mutation_pb_list), num_mutations) + + +def _mutated_pb(test_case, mutation_pb_list, mutation_type): + # Make sure there is only one mutation. + _assert_num_mutations(test_case, mutation_pb_list, 1) + + # We grab the only mutation. + mutated_pb = mutation_pb_list[0] + # Then check if it is the correct type. + test_case.assertEqual(mutated_pb.WhichOneof('operation'), + mutation_type) + + return getattr(mutated_pb, mutation_type) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py new file mode 100644 index 000000000000..79dd7b6b8b05 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -0,0 +1,1058 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = project + path_element = entity_pb.key.path.add() + path_element.kind = kind + path_element.id = integer_id + if name is not None and str_val is not None: + value_pb = _new_value_pb(entity_pb, name) + value_pb.string_value = str_val + + return entity_pb + + +class Test__get_gcd_project(unittest.TestCase): + + def _callFUT(self): + from google.cloud.datastore.client import _get_gcd_project + return _get_gcd_project() + + def test_no_value(self): + import os + from google.cloud._testing import _Monkey + + environ = {} + with _Monkey(os, getenv=environ.get): + project = self._callFUT() + self.assertIsNone(project) + + def test_value_set(self): + import os + from google.cloud._testing import _Monkey + from google.cloud.datastore.client import GCD_DATASET + + MOCK_PROJECT = object() + environ = {GCD_DATASET: MOCK_PROJECT} + with _Monkey(os, getenv=environ.get): + project = self._callFUT() + self.assertEqual(project, MOCK_PROJECT) + + +class Test__determine_default_project(unittest.TestCase): + + def _callFUT(self, project=None): + from google.cloud.datastore.client import ( + _determine_default_project) + return _determine_default_project(project=project) + + def _determine_default_helper(self, gcd=None, fallback=None, + project_called=None): + from google.cloud._testing import _Monkey + from google.cloud.datastore import client + + _callers = [] + + def gcd_mock(): + _callers.append('gcd_mock') + return gcd + + def fallback_mock(project=None): + _callers.append(('fallback_mock', project)) + return fallback + + patched_methods = { + '_get_gcd_project': gcd_mock, + '_base_default_project': fallback_mock, + } + + with _Monkey(client, **patched_methods): + returned_project = self._callFUT(project_called) + + return returned_project, _callers + + def test_no_value(self): + project, callers = self._determine_default_helper() + self.assertIsNone(project) + self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)]) + + def test_explicit(self): + PROJECT = object() + project, callers = self._determine_default_helper( + project_called=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, []) + + def test_gcd(self): + PROJECT = object() + project, callers = self._determine_default_helper(gcd=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['gcd_mock']) + + def test_fallback(self): + PROJECT = object() + project, callers = self._determine_default_helper(fallback=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)]) + + +class TestClient(unittest.TestCase): + + PROJECT = 'PROJECT' + + def setUp(self): + KLASS = self._getTargetClass() + self.original_cnxn_class = KLASS._connection_class + KLASS._connection_class = _MockConnection + + def tearDown(self): + KLASS = self._getTargetClass() + 
KLASS._connection_class = self.original_cnxn_class + + def _getTargetClass(self): + from google.cloud.datastore.client import Client + return Client + + def _makeOne(self, project=PROJECT, namespace=None, + credentials=None, http=None): + return self._getTargetClass()(project=project, + namespace=namespace, + credentials=credentials, + http=http) + + def test_ctor_w_project_no_environ(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore import client as _MUT + + # Some environments (e.g. AppVeyor CI) run in GCE, so + # this test would fail artificially. + with _Monkey(_MUT, _base_default_project=lambda project: None): + self.assertRaises(EnvironmentError, self._makeOne, None) + + def test_ctor_w_implicit_inputs(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore import client as _MUT + from google.cloud import client as _base_client + + OTHER = 'other' + creds = object() + default_called = [] + + def fallback_mock(project): + default_called.append(project) + return project or OTHER + + klass = self._getTargetClass() + with _Monkey(_MUT, + _determine_default_project=fallback_mock): + with _Monkey(_base_client, + get_credentials=lambda: creds): + client = klass() + self.assertEqual(client.project, OTHER) + self.assertIsNone(client.namespace) + self.assertIsInstance(client.connection, _MockConnection) + self.assertIs(client.connection.credentials, creds) + self.assertIsNone(client.connection.http) + self.assertIsNone(client.current_batch) + self.assertIsNone(client.current_transaction) + self.assertEqual(default_called, [None]) + + def test_ctor_w_explicit_inputs(self): + OTHER = 'other' + NAMESPACE = 'namespace' + creds = object() + http = object() + client = self._makeOne(project=OTHER, + namespace=NAMESPACE, + credentials=creds, + http=http) + self.assertEqual(client.project, OTHER) + self.assertEqual(client.namespace, NAMESPACE) + self.assertIsInstance(client.connection, _MockConnection) + self.assertIs(client.connection.credentials, creds) + self.assertIs(client.connection.http, http) + self.assertIsNone(client.current_batch) + self.assertEqual(list(client._batch_stack), []) + + def test__push_batch_and__pop_batch(self): + creds = object() + client = self._makeOne(credentials=creds) + batch = client.batch() + xact = client.transaction() + client._push_batch(batch) + self.assertEqual(list(client._batch_stack), [batch]) + self.assertIs(client.current_batch, batch) + self.assertIsNone(client.current_transaction) + client._push_batch(xact) + self.assertIs(client.current_batch, xact) + self.assertIs(client.current_transaction, xact) + # list(_LocalStack) returns in reverse order. 
+ self.assertEqual(list(client._batch_stack), [xact, batch]) + self.assertIs(client._pop_batch(), xact) + self.assertEqual(list(client._batch_stack), [batch]) + self.assertIs(client._pop_batch(), batch) + self.assertEqual(list(client._batch_stack), []) + + def test_get_miss(self): + _called_with = [] + + def _get_multi(*args, **kw): + _called_with.append((args, kw)) + return [] + + creds = object() + client = self._makeOne(credentials=creds) + client.get_multi = _get_multi + + key = object() + + self.assertIsNone(client.get(key)) + + self.assertEqual(_called_with[0][0], ()) + self.assertEqual(_called_with[0][1]['keys'], [key]) + self.assertIsNone(_called_with[0][1]['missing']) + self.assertIsNone(_called_with[0][1]['deferred']) + self.assertIsNone(_called_with[0][1]['transaction']) + + def test_get_hit(self): + TXN_ID = '123' + _called_with = [] + _entity = object() + + def _get_multi(*args, **kw): + _called_with.append((args, kw)) + return [_entity] + + creds = object() + client = self._makeOne(credentials=creds) + client.get_multi = _get_multi + + key, missing, deferred = object(), [], [] + + self.assertIs(client.get(key, missing, deferred, TXN_ID), _entity) + + self.assertEqual(_called_with[0][0], ()) + self.assertEqual(_called_with[0][1]['keys'], [key]) + self.assertIs(_called_with[0][1]['missing'], missing) + self.assertIs(_called_with[0][1]['deferred'], deferred) + self.assertEqual(_called_with[0][1]['transaction'], TXN_ID) + + def test_get_multi_no_keys(self): + creds = object() + client = self._makeOne(credentials=creds) + results = client.get_multi([]) + self.assertEqual(results, []) + + def test_get_multi_miss(self): + from google.cloud.datastore.key import Key + + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result() + key = Key('Kind', 1234, project=self.PROJECT) + results = client.get_multi([key]) + self.assertEqual(results, []) + + def test_get_multi_miss_w_missing(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.key import Key + + KIND = 'Kind' + ID = 1234 + + # Make a missing entity pb to be returned from mock backend. + missed = entity_pb2.Entity() + missed.key.partition_id.project_id = self.PROJECT + path_element = missed.key.path.add() + path_element.kind = KIND + path_element.id = ID + + creds = object() + client = self._makeOne(credentials=creds) + # Set missing entity on mock connection. 
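+        # (``get_multi`` should surface it through the ``missing`` list
+        # rather than in the returned entities.)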
+ client.connection._add_lookup_result(missing=[missed]) + + key = Key(KIND, ID, project=self.PROJECT) + missing = [] + entities = client.get_multi([key], missing=missing) + self.assertEqual(entities, []) + self.assertEqual([missed.key.to_protobuf() for missed in missing], + [key.to_protobuf()]) + + def test_get_multi_w_missing_non_empty(self): + from google.cloud.datastore.key import Key + + creds = object() + client = self._makeOne(credentials=creds) + key = Key('Kind', 1234, project=self.PROJECT) + + missing = ['this', 'list', 'is', 'not', 'empty'] + self.assertRaises(ValueError, client.get_multi, + [key], missing=missing) + + def test_get_multi_w_deferred_non_empty(self): + from google.cloud.datastore.key import Key + + creds = object() + client = self._makeOne(credentials=creds) + key = Key('Kind', 1234, project=self.PROJECT) + + deferred = ['this', 'list', 'is', 'not', 'empty'] + self.assertRaises(ValueError, client.get_multi, + [key], deferred=deferred) + + def test_get_multi_miss_w_deferred(self): + from google.cloud.datastore.key import Key + + key = Key('Kind', 1234, project=self.PROJECT) + + # Set deferred entity on mock connection. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result(deferred=[key.to_protobuf()]) + + deferred = [] + entities = client.get_multi([key], deferred=deferred) + self.assertEqual(entities, []) + self.assertEqual([def_key.to_protobuf() for def_key in deferred], + [key.to_protobuf()]) + + def test_get_multi_w_deferred_from_backend_but_not_passed(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.key import Key + + key1 = Key('Kind', project=self.PROJECT) + key1_pb = key1.to_protobuf() + key2 = Key('Kind', 2345, project=self.PROJECT) + key2_pb = key2.to_protobuf() + + entity1_pb = entity_pb2.Entity() + entity1_pb.key.CopyFrom(key1_pb) + entity2_pb = entity_pb2.Entity() + entity2_pb.key.CopyFrom(key2_pb) + + creds = object() + client = self._makeOne(credentials=creds) + # mock up two separate requests + client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) + client.connection._add_lookup_result([entity2_pb]) + + missing = [] + found = client.get_multi([key1, key2], missing=missing) + self.assertEqual(len(found), 2) + self.assertEqual(len(missing), 0) + + # Check the actual contents on the response. + self.assertIsInstance(found[0], Entity) + self.assertEqual(found[0].key.path, key1.path) + self.assertEqual(found[0].key.project, key1.project) + + self.assertIsInstance(found[1], Entity) + self.assertEqual(found[1].key.path, key2.path) + self.assertEqual(found[1].key.project, key2.project) + + cw = client.connection._lookup_cw + self.assertEqual(len(cw), 2) + + ds_id, k_pbs, eventual, tid = cw[0] + self.assertEqual(ds_id, self.PROJECT) + self.assertEqual(len(k_pbs), 2) + self.assertEqual(key1_pb, k_pbs[0]) + self.assertEqual(key2_pb, k_pbs[1]) + self.assertFalse(eventual) + self.assertIsNone(tid) + + ds_id, k_pbs, eventual, tid = cw[1] + self.assertEqual(ds_id, self.PROJECT) + self.assertEqual(len(k_pbs), 1) + self.assertEqual(key2_pb, k_pbs[0]) + self.assertFalse(eventual) + self.assertIsNone(tid) + + def test_get_multi_hit(self): + from google.cloud.datastore.key import Key + + KIND = 'Kind' + ID = 1234 + PATH = [{'kind': KIND, 'id': ID}] + + # Make a found entity pb to be returned from mock backend. 
+ entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + + # Make a connection to return the entity pb. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb]) + + key = Key(KIND, ID, project=self.PROJECT) + result, = client.get_multi([key]) + new_key = result.key + + # Check the returned value is as expected. + self.assertIsNot(new_key, key) + self.assertEqual(new_key.project, self.PROJECT) + self.assertEqual(new_key.path, PATH) + self.assertEqual(list(result), ['foo']) + self.assertEqual(result['foo'], 'Foo') + + def test_get_multi_hit_w_transaction(self): + from google.cloud.datastore.key import Key + + TXN_ID = '123' + KIND = 'Kind' + ID = 1234 + PATH = [{'kind': KIND, 'id': ID}] + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + + # Make a connection to return the entity pb. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb]) + + key = Key(KIND, ID, project=self.PROJECT) + txn = client.transaction() + txn._id = TXN_ID + result, = client.get_multi([key], transaction=txn) + new_key = result.key + + # Check the returned value is as expected. + self.assertIsNot(new_key, key) + self.assertEqual(new_key.project, self.PROJECT) + self.assertEqual(new_key.path, PATH) + self.assertEqual(list(result), ['foo']) + self.assertEqual(result['foo'], 'Foo') + + cw = client.connection._lookup_cw + self.assertEqual(len(cw), 1) + _, _, _, transaction_id = cw[0] + self.assertEqual(transaction_id, TXN_ID) + + def test_get_multi_hit_multiple_keys_same_project(self): + from google.cloud.datastore.key import Key + + KIND = 'Kind' + ID1 = 1234 + ID2 = 2345 + + # Make a found entity pb to be returned from mock backend. + entity_pb1 = _make_entity_pb(self.PROJECT, KIND, ID1) + entity_pb2 = _make_entity_pb(self.PROJECT, KIND, ID2) + + # Make a connection to return the entity pbs. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb1, entity_pb2]) + + key1 = Key(KIND, ID1, project=self.PROJECT) + key2 = Key(KIND, ID2, project=self.PROJECT) + retrieved1, retrieved2 = client.get_multi([key1, key2]) + + # Check values match. + self.assertEqual(retrieved1.key.path, key1.path) + self.assertEqual(dict(retrieved1), {}) + self.assertEqual(retrieved2.key.path, key2.path) + self.assertEqual(dict(retrieved2), {}) + + def test_get_multi_hit_multiple_keys_different_project(self): + from google.cloud.datastore.key import Key + + PROJECT1 = 'PROJECT' + PROJECT2 = 'PROJECT-ALT' + + # Make sure our IDs are actually different. + self.assertNotEqual(PROJECT1, PROJECT2) + + key1 = Key('KIND', 1234, project=PROJECT1) + key2 = Key('KIND', 1234, project=PROJECT2) + + creds = object() + client = self._makeOne(credentials=creds) + + with self.assertRaises(ValueError): + client.get_multi([key1, key2]) + + def test_get_multi_max_loops(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore import client as _MUT + from google.cloud.datastore.key import Key + + KIND = 'Kind' + ID = 1234 + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + + # Make a connection to return the entity pb. 
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        client.connection._add_lookup_result([entity_pb])
+
+        key = Key(KIND, ID, project=self.PROJECT)
+        deferred = []
+        missing = []
+        with _Monkey(_MUT, _MAX_LOOPS=-1):
+            result = client.get_multi([key], missing=missing,
+                                      deferred=deferred)
+
+        # Make sure we have no results, even though the connection has been
+        # set up as in `test_hit` to return a single result.
+        self.assertEqual(result, [])
+        self.assertEqual(missing, [])
+        self.assertEqual(deferred, [])
+
+    def test_put(self):
+        _called_with = []
+
+        def _put_multi(*args, **kw):
+            _called_with.append((args, kw))
+
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        client.put_multi = _put_multi
+        entity = object()
+
+        client.put(entity)
+
+        self.assertEqual(_called_with[0][0], ())
+        self.assertEqual(_called_with[0][1]['entities'], [entity])
+
+    def test_put_multi_no_entities(self):
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        self.assertIsNone(client.put_multi([]))
+
+    def test_put_multi_w_single_empty_entity(self):
+        # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649
+        from google.cloud.datastore.entity import Entity
+
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        self.assertRaises(ValueError, client.put_multi, Entity())
+
+    def test_put_multi_no_batch_w_partial_key(self):
+        from google.cloud.datastore.helpers import _property_tuples
+
+        entity = _Entity(foo=u'bar')
+        key = entity.key = _Key(self.PROJECT)
+        key._id = None
+
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        client.connection._commit.append([_KeyPB(key)])
+
+        result = client.put_multi([entity])
+        self.assertIsNone(result)
+
+        self.assertEqual(len(client.connection._commit_cw), 1)
+        (project,
+         commit_req, transaction_id) = client.connection._commit_cw[0]
+        self.assertEqual(project, self.PROJECT)
+
+        mutated_entity = _mutated_pb(self, commit_req.mutations, 'insert')
+        self.assertEqual(mutated_entity.key, key.to_protobuf())
+
+        prop_list = list(_property_tuples(mutated_entity))
+        self.assertEqual(len(prop_list), 1)
+        name, value_pb = prop_list[0]
+        self.assertEqual(name, 'foo')
+        self.assertEqual(value_pb.string_value, u'bar')
+
+        self.assertIsNone(transaction_id)
+
+    def test_put_multi_existing_batch_w_completed_key(self):
+        from google.cloud.datastore.helpers import _property_tuples
+
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        entity = _Entity(foo=u'bar')
+        key = entity.key = _Key(self.PROJECT)
+
+        with _NoCommitBatch(client) as CURR_BATCH:
+            result = client.put_multi([entity])
+
+        self.assertIsNone(result)
+        mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, 'upsert')
+        self.assertEqual(mutated_entity.key, key.to_protobuf())
+
+        prop_list = list(_property_tuples(mutated_entity))
+        self.assertEqual(len(prop_list), 1)
+        name, value_pb = prop_list[0]
+        self.assertEqual(name, 'foo')
+        self.assertEqual(value_pb.string_value, u'bar')
+
+    def test_delete(self):
+        _called_with = []
+
+        def _delete_multi(*args, **kw):
+            _called_with.append((args, kw))
+
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        client.delete_multi = _delete_multi
+        key = object()
+
+        client.delete(key)
+
+        self.assertEqual(_called_with[0][0], ())
+        self.assertEqual(_called_with[0][1]['keys'], [key])
+
+    def test_delete_multi_no_keys(self):
+        creds = object()
+        client = self._makeOne(credentials=creds)
+        result = client.delete_multi([])
+        self.assertIsNone(result)
+
self.assertEqual(len(client.connection._commit_cw), 0) + + def test_delete_multi_no_batch(self): + key = _Key(self.PROJECT) + + creds = object() + client = self._makeOne(credentials=creds) + client.connection._commit.append([]) + + result = client.delete_multi([key]) + self.assertIsNone(result) + self.assertEqual(len(client.connection._commit_cw), 1) + (project, + commit_req, transaction_id) = client.connection._commit_cw[0] + self.assertEqual(project, self.PROJECT) + + mutated_key = _mutated_pb(self, commit_req.mutations, 'delete') + self.assertEqual(mutated_key, key.to_protobuf()) + self.assertIsNone(transaction_id) + + def test_delete_multi_w_existing_batch(self): + creds = object() + client = self._makeOne(credentials=creds) + key = _Key(self.PROJECT) + + with _NoCommitBatch(client) as CURR_BATCH: + result = client.delete_multi([key]) + + self.assertIsNone(result) + mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete') + self.assertEqual(mutated_key, key._key) + self.assertEqual(len(client.connection._commit_cw), 0) + + def test_delete_multi_w_existing_transaction(self): + creds = object() + client = self._makeOne(credentials=creds) + key = _Key(self.PROJECT) + + with _NoCommitTransaction(client) as CURR_XACT: + result = client.delete_multi([key]) + + self.assertIsNone(result) + mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete') + self.assertEqual(mutated_key, key._key) + self.assertEqual(len(client.connection._commit_cw), 0) + + def test_allocate_ids_w_partial_key(self): + NUM_IDS = 2 + + INCOMPLETE_KEY = _Key(self.PROJECT) + INCOMPLETE_KEY._id = None + + creds = object() + client = self._makeOne(credentials=creds) + + result = client.allocate_ids(INCOMPLETE_KEY, NUM_IDS) + + # Check the IDs returned. + self.assertEqual([key._id for key in result], list(range(NUM_IDS))) + + def test_allocate_ids_with_completed_key(self): + creds = object() + client = self._makeOne(credentials=creds) + + COMPLETE_KEY = _Key(self.PROJECT) + self.assertRaises(ValueError, client.allocate_ids, COMPLETE_KEY, 2) + + def test_key_w_project(self): + KIND = 'KIND' + ID = 1234 + + creds = object() + client = self._makeOne(credentials=creds) + + self.assertRaises(TypeError, + client.key, KIND, ID, project=self.PROJECT) + + def test_key_wo_project(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + KIND = 'KIND' + ID = 1234 + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Key=_Dummy): + key = client.key(KIND, ID) + + self.assertIsInstance(key, _Dummy) + self.assertEqual(key.args, (KIND, ID)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': None, + } + self.assertEqual(key.kwargs, expected_kwargs) + + def test_key_w_namespace(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + KIND = 'KIND' + ID = 1234 + NAMESPACE = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE, credentials=creds) + + with _Monkey(MUT, Key=_Dummy): + key = client.key(KIND, ID) + + self.assertIsInstance(key, _Dummy) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE, + } + self.assertEqual(key.kwargs, expected_kwargs) + + def test_key_w_namespace_collision(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + KIND = 'KIND' + ID = 1234 + NAMESPACE1 = object() + NAMESPACE2 = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE1, credentials=creds) 
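+        # A namespace passed explicitly to ``client.key()`` should
+        # override the client-level default (NAMESPACE2 below).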
+ + with _Monkey(MUT, Key=_Dummy): + key = client.key(KIND, ID, namespace=NAMESPACE2) + + self.assertIsInstance(key, _Dummy) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE2, + } + self.assertEqual(key.kwargs, expected_kwargs) + + def test_batch(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Batch=_Dummy): + batch = client.batch() + + self.assertIsInstance(batch, _Dummy) + self.assertEqual(batch.args, (client,)) + self.assertEqual(batch.kwargs, {}) + + def test_transaction_defaults(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Transaction=_Dummy): + xact = client.transaction() + + self.assertIsInstance(xact, _Dummy) + self.assertEqual(xact.args, (client,)) + self.assertEqual(xact.kwargs, {}) + + def test_query_w_client(self): + KIND = 'KIND' + + creds = object() + client = self._makeOne(credentials=creds) + other = self._makeOne(credentials=object()) + + self.assertRaises(TypeError, client.query, kind=KIND, client=other) + + def test_query_w_project(self): + KIND = 'KIND' + + creds = object() + client = self._makeOne(credentials=creds) + + self.assertRaises(TypeError, + client.query, kind=KIND, project=self.PROJECT) + + def test_query_w_defaults(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query() + + self.assertIsInstance(query, _Dummy) + self.assertEqual(query.args, (client,)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': None, + } + self.assertEqual(query.kwargs, expected_kwargs) + + def test_query_explicit(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + KIND = 'KIND' + NAMESPACE = 'NAMESPACE' + ANCESTOR = object() + FILTERS = [('PROPERTY', '==', 'VALUE')] + PROJECTION = ['__key__'] + ORDER = ['PROPERTY'] + DISTINCT_ON = ['DISTINCT_ON'] + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query( + kind=KIND, + namespace=NAMESPACE, + ancestor=ANCESTOR, + filters=FILTERS, + projection=PROJECTION, + order=ORDER, + distinct_on=DISTINCT_ON, + ) + + self.assertIsInstance(query, _Dummy) + self.assertEqual(query.args, (client,)) + kwargs = { + 'project': self.PROJECT, + 'kind': KIND, + 'namespace': NAMESPACE, + 'ancestor': ANCESTOR, + 'filters': FILTERS, + 'projection': PROJECTION, + 'order': ORDER, + 'distinct_on': DISTINCT_ON, + } + self.assertEqual(query.kwargs, kwargs) + + def test_query_w_namespace(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + KIND = 'KIND' + NAMESPACE = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE, credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query(kind=KIND) + + self.assertIsInstance(query, _Dummy) + self.assertEqual(query.args, (client,)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE, + 'kind': KIND, + } + self.assertEqual(query.kwargs, expected_kwargs) + + def test_query_w_namespace_collision(self): + from google.cloud.datastore import client as MUT + from google.cloud._testing import _Monkey + + KIND = 
'KIND' + NAMESPACE1 = object() + NAMESPACE2 = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE1, credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query(kind=KIND, namespace=NAMESPACE2) + + self.assertIsInstance(query, _Dummy) + self.assertEqual(query.args, (client,)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE2, + 'kind': KIND, + } + self.assertEqual(query.kwargs, expected_kwargs) + + +class _Dummy(object): + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + + +class _MockConnection(object): + + def __init__(self, credentials=None, http=None): + self.credentials = credentials + self.http = http + self._lookup_cw = [] + self._lookup = [] + self._commit_cw = [] + self._commit = [] + self._alloc_cw = [] + self._alloc = [] + self._index_updates = 0 + + def _add_lookup_result(self, results=(), missing=(), deferred=()): + self._lookup.append((list(results), list(missing), list(deferred))) + + def lookup(self, project, key_pbs, eventual=False, transaction_id=None): + self._lookup_cw.append((project, key_pbs, eventual, transaction_id)) + triple, self._lookup = self._lookup[0], self._lookup[1:] + results, missing, deferred = triple + return results, missing, deferred + + def commit(self, project, commit_request, transaction_id): + self._commit_cw.append((project, commit_request, transaction_id)) + response, self._commit = self._commit[0], self._commit[1:] + return self._index_updates, response + + def allocate_ids(self, project, key_pbs): + self._alloc_cw.append((project, key_pbs)) + num_pbs = len(key_pbs) + return [_KeyPB(i) for i in list(range(num_pbs))] + + +class _NoCommitBatch(object): + + def __init__(self, client): + from google.cloud.datastore.batch import Batch + self._client = client + self._batch = Batch(client) + self._batch.begin() + + def __enter__(self): + self._client._push_batch(self._batch) + return self._batch + + def __exit__(self, *args): + self._client._pop_batch() + + +class _NoCommitTransaction(object): + + def __init__(self, client, transaction_id='TRANSACTION'): + from google.cloud.datastore.batch import Batch + from google.cloud.datastore.transaction import Transaction + self._client = client + xact = self._transaction = Transaction(client) + xact._id = transaction_id + Batch.begin(xact) + + def __enter__(self): + self._client._push_batch(self._transaction) + return self._transaction + + def __exit__(self, *args): + self._client._pop_batch() + + +class _Entity(dict): + key = None + exclude_from_indexes = () + _meanings = {} + + +class _Key(object): + _MARKER = object() + _kind = 'KIND' + _key = 'KEY' + _path = None + _id = 1234 + _stored = None + + def __init__(self, project): + self.project = project + + @property + def is_partial(self): + return self._id is None + + def to_protobuf(self): + from google.cloud.datastore._generated import entity_pb2 + key = self._key = entity_pb2.Key() + # Don't assign it, because it will just get ripped out + # key.partition_id.project_id = self.project + + element = key.path.add() + element.kind = self._kind + if self._id is not None: + element.id = self._id + + return key + + def completed_key(self, new_id): + assert self.is_partial + new_key = self.__class__(self.project) + new_key._id = new_id + return new_key + + +class _PathElementPB(object): + + def __init__(self, id_): + self.id = id_ + + +class _KeyPB(object): + + def __init__(self, id_): + self.path = [_PathElementPB(id_)] + + +def _assert_num_mutations(test_case, 
mutation_pb_list, num_mutations): + test_case.assertEqual(len(mutation_pb_list), num_mutations) + + +def _mutated_pb(test_case, mutation_pb_list, mutation_type): + # Make sure there is only one mutation. + _assert_num_mutations(test_case, mutation_pb_list, 1) + + # We grab the only mutation. + mutated_pb = mutation_pb_list[0] + # Then check if it is the correct type. + test_case.assertEqual(mutated_pb.WhichOneof('operation'), + mutation_type) + + return getattr(mutated_pb, mutation_type) diff --git a/packages/google-cloud-datastore/unit_tests/test_connection.py b/packages/google-cloud-datastore/unit_tests/test_connection.py new file mode 100644 index 000000000000..f1030a61c948 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_connection.py @@ -0,0 +1,1140 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +from google.cloud.datastore.connection import _HAVE_GRPC + + +class Test_DatastoreAPIOverHttp(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.connection import _DatastoreAPIOverHttp + return _DatastoreAPIOverHttp + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test__rpc(self): + class ReqPB(object): + + def SerializeToString(self): + return REQPB + + class RspPB(object): + + def __init__(self, pb): + self._pb = pb + + @classmethod + def FromString(cls, pb): + return cls(pb) + + REQPB = b'REQPB' + PROJECT = 'PROJECT' + METHOD = 'METHOD' + URI = 'http://api-url' + conn = _Connection(URI) + datastore_api = self._makeOne(conn) + http = conn.http = Http({'status': '200'}, 'CONTENT') + response = datastore_api._rpc(PROJECT, METHOD, ReqPB(), RspPB) + self.assertIsInstance(response, RspPB) + self.assertEqual(response._pb, 'CONTENT') + called_with = http._called_with + self.assertEqual(called_with['uri'], URI) + self.assertEqual(called_with['method'], 'POST') + self.assertEqual(called_with['headers']['Content-Type'], + 'application/x-protobuf') + self.assertEqual(called_with['headers']['User-Agent'], + conn.USER_AGENT) + self.assertEqual(called_with['body'], REQPB) + self.assertEqual(conn.build_kwargs, + [{'method': METHOD, 'project': PROJECT}]) + + def test__request_w_200(self): + PROJECT = 'PROJECT' + METHOD = 'METHOD' + DATA = b'DATA' + URI = 'http://api-url' + conn = _Connection(URI) + datastore_api = self._makeOne(conn) + http = conn.http = Http({'status': '200'}, 'CONTENT') + self.assertEqual(datastore_api._request(PROJECT, METHOD, DATA), + 'CONTENT') + called_with = http._called_with + self.assertEqual(called_with['uri'], URI) + self.assertEqual(called_with['method'], 'POST') + self.assertEqual(called_with['headers']['Content-Type'], + 'application/x-protobuf') + self.assertEqual(called_with['headers']['User-Agent'], + conn.USER_AGENT) + self.assertEqual(called_with['body'], DATA) + self.assertEqual(conn.build_kwargs, + [{'method': METHOD, 'project': PROJECT}]) + + def test__request_not_200(self): + from google.cloud.exceptions import BadRequest + from 
google.rpc import status_pb2 + + error = status_pb2.Status() + error.message = 'Entity value is indexed.' + error.code = 9 # FAILED_PRECONDITION + + PROJECT = 'PROJECT' + METHOD = 'METHOD' + DATA = 'DATA' + URI = 'http://api-url' + conn = _Connection(URI) + datastore_api = self._makeOne(conn) + conn.http = Http({'status': '400'}, error.SerializeToString()) + with self.assertRaises(BadRequest) as exc: + datastore_api._request(PROJECT, METHOD, DATA) + expected_message = '400 Entity value is indexed.' + self.assertEqual(str(exc.exception), expected_message) + self.assertEqual(conn.build_kwargs, + [{'method': METHOD, 'project': PROJECT}]) + + +class Test_DatastoreAPIOverGRPC(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.connection import _DatastoreAPIOverGRPC + return _DatastoreAPIOverGRPC + + def _makeOne(self, stub, connection=None, secure=True, mock_args=None): + from google.cloud._testing import _Monkey + from google.cloud.datastore import connection as MUT + + if connection is None: + connection = _Connection(None) + connection.credentials = object() + connection.host = 'CURR_HOST' + + if mock_args is None: + mock_args = [] + + def mock_make_stub(*args): + mock_args.append(args) + return stub + + if secure: + to_monkey = {'make_secure_stub': mock_make_stub} + else: + to_monkey = {'make_insecure_stub': mock_make_stub} + with _Monkey(MUT, **to_monkey): + return self._getTargetClass()(connection, secure) + + def test_constructor(self): + from google.cloud.datastore import connection as MUT + + conn = _Connection(None) + conn.credentials = object() + conn.host = 'CURR_HOST' + + stub = _GRPCStub() + mock_args = [] + datastore_api = self._makeOne(stub, connection=conn, + mock_args=mock_args) + self.assertIs(datastore_api._stub, stub) + + self.assertEqual(mock_args, [( + conn.credentials, + conn.USER_AGENT, + MUT.datastore_grpc_pb2.DatastoreStub, + conn.host, + )]) + + def test_constructor_insecure(self): + from google.cloud.datastore import connection as MUT + + conn = _Connection(None) + conn.credentials = object() + conn.host = 'CURR_HOST:1234' + + stub = _GRPCStub() + mock_args = [] + datastore_api = self._makeOne(stub, connection=conn, + secure=False, + mock_args=mock_args) + self.assertIs(datastore_api._stub, stub) + + self.assertEqual(mock_args, [( + MUT.datastore_grpc_pb2.DatastoreStub, + conn.host, + )]) + + def test_lookup(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + result = datastore_api.lookup(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'Lookup')]) + + def test_run_query(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + result = datastore_api.run_query(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'RunQuery')]) + + def test_begin_transaction(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + result = datastore_api.begin_transaction(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual( + stub.method_calls, + [(request_pb, 
'BeginTransaction')]) + + def test_commit_success(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + result = datastore_api.commit(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'Commit')]) + + def _commit_failure_helper(self, exc, err_class): + stub = _GRPCStub(side_effect=exc) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + with self.assertRaises(err_class): + datastore_api.commit(project, request_pb) + + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'Commit')]) + + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_commit_failure_aborted(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import Conflict + from google.cloud.exceptions import GrpcRendezvous + + details = 'Bad things.' + exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) + exc = GrpcRendezvous(exc_state, None, None, None) + self._commit_failure_helper(exc, Conflict) + + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_commit_failure_cancelled(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import GrpcRendezvous + + exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) + exc = GrpcRendezvous(exc_state, None, None, None) + self._commit_failure_helper(exc, GrpcRendezvous) + + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_commit_failure_non_grpc_err(self): + exc = RuntimeError('Not a gRPC error') + self._commit_failure_helper(exc, RuntimeError) + + def test_rollback(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + result = datastore_api.rollback(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'Rollback')]) + + def test_allocate_ids(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + result = datastore_api.allocate_ids(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual( + stub.method_calls, + [(request_pb, 'AllocateIds')]) + + +class TestConnection(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.connection import Connection + + return Connection + + def _make_key_pb(self, project, id_=1234): + from google.cloud.datastore.key import Key + path_args = ('Kind',) + if id_ is not None: + path_args += (id_,) + return Key(*path_args, project=project).to_protobuf() + + def _make_query_pb(self, kind): + from google.cloud.datastore._generated import query_pb2 + pb = query_pb2.Query() + pb.kind.add().name = kind + return pb + + def _makeOne(self, credentials=None, http=None, use_grpc=False): + from google.cloud._testing import _Monkey + from google.cloud.datastore import connection as MUT + with _Monkey(MUT, _USE_GRPC=use_grpc): + return self._getTargetClass()(credentials=credentials, http=http) + + def _verifyProtobufCall(self, called_with, URI, conn): + self.assertEqual(called_with['uri'], URI) + 
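+ # Editorial note (reviewer comment, not part of the original change):
+ # every HTTP-backed RPC in this suite is expected to go out as a single
+ # POST whose body is a serialized request protobuf, i.e. roughly:
+ #
+ #     called_with = {'uri': URI, 'method': 'POST',
+ #                    'headers': {'Content-Type': 'application/x-protobuf',
+ #                                'User-Agent': conn.USER_AGENT},
+ #                    'body': request_pb.SerializeToString()}
+ #
+ # which is exactly the shape this helper asserts, one field at a time.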
self.assertEqual(called_with['method'], 'POST') + self.assertEqual(called_with['headers']['Content-Type'], + 'application/x-protobuf') + self.assertEqual(called_with['headers']['User-Agent'], + conn.USER_AGENT) + + def test_default_url(self): + klass = self._getTargetClass() + conn = self._makeOne() + self.assertEqual(conn.api_base_url, klass.API_BASE_URL) + + def test_custom_url_from_env(self): + import os + from google.cloud._testing import _Monkey + from google.cloud.connection import API_BASE_URL + from google.cloud.environment_vars import GCD_HOST + + HOST = 'CURR_HOST' + fake_environ = {GCD_HOST: HOST} + + with _Monkey(os, environ=fake_environ): + conn = self._makeOne() + + self.assertNotEqual(conn.api_base_url, API_BASE_URL) + self.assertEqual(conn.api_base_url, 'http://' + HOST) + + def test_ctor_defaults(self): + conn = self._makeOne() + self.assertIsNone(conn.credentials) + + def test_ctor_without_grpc(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore import connection as MUT + + connections = [] + return_val = object() + + def mock_api(connection): + connections.append(connection) + return return_val + + with _Monkey(MUT, _DatastoreAPIOverHttp=mock_api): + conn = self._makeOne(use_grpc=False) + + self.assertIsNone(conn.credentials) + self.assertIs(conn._datastore_api, return_val) + self.assertEqual(connections, [conn]) + + def test_ctor_with_grpc(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore import connection as MUT + + api_args = [] + return_val = object() + + def mock_api(connection, secure): + api_args.append((connection, secure)) + return return_val + + with _Monkey(MUT, _DatastoreAPIOverGRPC=mock_api): + conn = self._makeOne(use_grpc=True) + + self.assertIsNone(conn.credentials) + self.assertIs(conn._datastore_api, return_val) + self.assertEqual(api_args, [(conn, True)]) + + def test_ctor_explicit(self): + class Creds(object): + + def create_scoped_required(self): + return False + + creds = Creds() + conn = self._makeOne(creds) + self.assertIs(conn.credentials, creds) + + def test_http_w_existing(self): + conn = self._makeOne() + conn._http = http = object() + self.assertIs(conn.http, http) + + def test_http_wo_creds(self): + import httplib2 + + conn = self._makeOne() + self.assertIsInstance(conn.http, httplib2.Http) + + def test_http_w_creds(self): + import httplib2 + + authorized = object() + + class Creds(object): + + def authorize(self, http): + self._called_with = http + return authorized + + def create_scoped_required(self): + return False + + creds = Creds() + conn = self._makeOne(creds) + self.assertIs(conn.http, authorized) + self.assertIsInstance(creds._called_with, httplib2.Http) + + def test_build_api_url_w_default_base_version(self): + PROJECT = 'PROJECT' + METHOD = 'METHOD' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':' + METHOD, + ]) + self.assertEqual(conn.build_api_url(PROJECT, METHOD), URI) + + def test_build_api_url_w_explicit_base_version(self): + BASE = 'http://example.com/' + VER = '3.1415926' + PROJECT = 'PROJECT' + METHOD = 'METHOD' + conn = self._makeOne() + URI = '/'.join([ + BASE, + VER, + 'projects', + PROJECT + ':' + METHOD, + ]) + self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER), + URI) + + def test_lookup_single_key_empty_response(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = 
datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb]) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + + def test_lookup_single_key_empty_response_w_eventual(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb], + eventual=True) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + self.assertEqual(request.read_options.read_consistency, + datastore_pb2.ReadOptions.EVENTUAL) + self.assertEqual(request.read_options.transaction, b'') + + def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): + PROJECT = 'PROJECT' + TRANSACTION = b'TRANSACTION' + key_pb = self._make_key_pb(PROJECT) + conn = self._makeOne() + self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb, + eventual=True, transaction_id=TRANSACTION) + + def test_lookup_single_key_empty_response_w_transaction(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + TRANSACTION = b'TRANSACTION' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb], + transaction_id=TRANSACTION) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + self.assertEqual(request.read_options.transaction, TRANSACTION) + + def test_lookup_single_key_nonempty_response(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import entity_pb2 + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + entity = entity_pb2.Entity() + entity.key.CopyFrom(key_pb) + rsp_pb.found.add(entity=entity) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, 
rsp_pb.SerializeToString()) + (found,), missing, deferred = conn.lookup(PROJECT, [key_pb]) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + self.assertEqual(found.key.path[0].kind, 'Kind') + self.assertEqual(found.key.path[0].id, 1234) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + + def test_lookup_multiple_keys_empty_response(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb1 = self._make_key_pb(PROJECT) + key_pb2 = self._make_key_pb(PROJECT, id_=2345) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 2) + self.assertEqual(key_pb1, keys[0]) + self.assertEqual(key_pb2, keys[1]) + + def test_lookup_multiple_keys_w_missing(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb1 = self._make_key_pb(PROJECT) + key_pb2 = self._make_key_pb(PROJECT, id_=2345) + rsp_pb = datastore_pb2.LookupResponse() + er_1 = rsp_pb.missing.add() + er_1.entity.key.CopyFrom(key_pb1) + er_2 = rsp_pb.missing.add() + er_2.entity.key.CopyFrom(key_pb2) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) + self.assertEqual(result, []) + self.assertEqual(len(deferred), 0) + self.assertEqual([missed.key for missed in missing], + [key_pb1, key_pb2]) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 2) + self.assertEqual(key_pb1, keys[0]) + self.assertEqual(key_pb2, keys[1]) + + def test_lookup_multiple_keys_w_deferred(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb1 = self._make_key_pb(PROJECT) + key_pb2 = self._make_key_pb(PROJECT, id_=2345) + rsp_pb = datastore_pb2.LookupResponse() + rsp_pb.deferred.add().CopyFrom(key_pb1) + rsp_pb.deferred.add().CopyFrom(key_pb2) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) + self.assertEqual(result, []) + self.assertEqual(len(missing), 0) + self.assertEqual([def_key for def_key in deferred], [key_pb1, key_pb2]) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + self.assertEqual(cw['uri'], URI) + self.assertEqual(cw['method'], 'POST') + 
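+ # Reviewer sketch (not part of the original change): the lookup tests
+ # here all lean on the same three-part return contract, roughly
+ #
+ #     found, missing, deferred = conn.lookup(project, [key_pb])
+ #     # found    -- entity protobufs for keys that exist
+ #     # missing  -- entity protobufs echoed back for absent keys
+ #     # deferred -- key protobufs the backend declined to resolve yet
+ #
+ # 'project' and 'key_pb' are the same placeholders the tests use.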
self.assertEqual(cw['headers']['Content-Type'], + 'application/x-protobuf') + self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 2) + self.assertEqual(key_pb1, keys[0]) + self.assertEqual(key_pb2, keys[1]) + + def test_run_query_w_eventual_no_transaction(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs, end, more, skipped = conn.run_query(PROJECT, q_pb, + eventual=True) + self.assertEqual(pbs, []) + self.assertEqual(end, CURSOR) + self.assertTrue(more) + self.assertEqual(skipped, 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, '') + self.assertEqual(request.query, q_pb) + self.assertEqual(request.read_options.read_consistency, + datastore_pb2.ReadOptions.EVENTUAL) + self.assertEqual(request.read_options.transaction, b'') + + def test_run_query_wo_eventual_w_transaction(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + TRANSACTION = b'TRANSACTION' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs, end, more, skipped = conn.run_query( + PROJECT, q_pb, transaction_id=TRANSACTION) + self.assertEqual(pbs, []) + self.assertEqual(end, CURSOR) + self.assertTrue(more) + self.assertEqual(skipped, 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, '') + self.assertEqual(request.query, q_pb) + self.assertEqual( + request.read_options.read_consistency, + datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED) + self.assertEqual(request.read_options.transaction, TRANSACTION) + + def test_run_query_w_eventual_and_transaction(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + TRANSACTION = b'TRANSACTION' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + 
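+ # Editorial note (not part of the original change): run_query() mirrors
+ # lookup() in refusing eventual=True together with a transaction_id --
+ # an eventually-consistent read inside a transaction is contradictory,
+ # so the call below is expected to raise ValueError before any HTTP
+ # request goes out. On success the method returns a four-tuple, roughly:
+ #
+ #     pbs, end_cursor, more_results, skipped = conn.run_query(
+ #         project, query_pb)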
rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb, + eventual=True, transaction_id=TRANSACTION) + + def test_run_query_wo_namespace_empty_result(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs, end, more, skipped = conn.run_query(PROJECT, q_pb) + self.assertEqual(pbs, []) + self.assertEqual(end, CURSOR) + self.assertTrue(more) + self.assertEqual(skipped, 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, '') + self.assertEqual(request.query, q_pb) + + def test_run_query_w_namespace_nonempty_result(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import entity_pb2 + + PROJECT = 'PROJECT' + KIND = 'Kind' + entity = entity_pb2.Entity() + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.entity_results.add(entity=entity) + rsp_pb.batch.entity_result_type = 1 # FULL + rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs = conn.run_query(PROJECT, q_pb, 'NS')[0] + self.assertEqual(len(pbs), 1) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, 'NS') + self.assertEqual(request.query, q_pb) + + def test_begin_transaction(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + TRANSACTION = b'TRANSACTION' + rsp_pb = datastore_pb2.BeginTransactionResponse() + rsp_pb.transaction = TRANSACTION + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':beginTransaction', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.BeginTransactionRequest + request = rq_class() + request.ParseFromString(cw['body']) + + def test_commit_wo_transaction(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore import connection as MUT + from google.cloud.datastore.helpers import _new_value_pb + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.CommitResponse() + req_pb = datastore_pb2.CommitRequest() + mutation = req_pb.mutations.add() 
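+ # Editorial note (not part of the original change): each CommitRequest
+ # mutation is a protobuf oneof named 'operation' (see _mutated_pb and
+ # its WhichOneof('operation') check in test_batch.py); reading the
+ # .upsert attribute below selects that arm, after which the entity's
+ # key and properties are filled in directly on the sub-message.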
+ insert = mutation.upsert + insert.key.CopyFrom(key_pb) + value_pb = _new_value_pb(insert, 'foo') + value_pb.string_value = u'Foo' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':commit', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + + # Set up mock for parsing the response. + expected_result = object() + _parsed = [] + + def mock_parse(response): + _parsed.append(response) + return expected_result + + with _Monkey(MUT, _parse_commit_response=mock_parse): + result = conn.commit(PROJECT, req_pb, None) + + self.assertIs(result, expected_result) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.CommitRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.transaction, b'') + self.assertEqual(list(request.mutations), [mutation]) + self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) + self.assertEqual(_parsed, [rsp_pb]) + + def test_commit_w_transaction(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore import connection as MUT + from google.cloud.datastore.helpers import _new_value_pb + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.CommitResponse() + req_pb = datastore_pb2.CommitRequest() + mutation = req_pb.mutations.add() + insert = mutation.upsert + insert.key.CopyFrom(key_pb) + value_pb = _new_value_pb(insert, 'foo') + value_pb.string_value = u'Foo' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':commit', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + + # Set up mock for parsing the response. 
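+ # Editorial note (not part of the original change): this is the same
+ # _Monkey stubbing pattern as in test_commit_wo_transaction -- swap the
+ # module-level _parse_commit_response for a recorder so the test can
+ # assert both that the raw CommitResponse reached the parser and that
+ # commit() returned the parser's result unchanged:
+ #
+ #     with _Monkey(MUT, _parse_commit_response=mock_parse):
+ #         result = conn.commit(PROJECT, req_pb, b'xact')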
+ expected_result = object() + _parsed = [] + + def mock_parse(response): + _parsed.append(response) + return expected_result + + with _Monkey(MUT, _parse_commit_response=mock_parse): + result = conn.commit(PROJECT, req_pb, b'xact') + + self.assertIs(result, expected_result) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.CommitRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.transaction, b'xact') + self.assertEqual(list(request.mutations), [mutation]) + self.assertEqual(request.mode, rq_class.TRANSACTIONAL) + self.assertEqual(_parsed, [rsp_pb]) + + def test_rollback_ok(self): + from google.cloud.datastore._generated import datastore_pb2 + PROJECT = 'PROJECT' + TRANSACTION = b'xact' + + rsp_pb = datastore_pb2.RollbackResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':rollback', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertIsNone(conn.rollback(PROJECT, TRANSACTION)) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RollbackRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.transaction, TRANSACTION) + + def test_allocate_ids_empty(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + rsp_pb = datastore_pb2.AllocateIdsResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':allocateIds', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.allocate_ids(PROJECT, []), []) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.AllocateIdsRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(list(request.keys), []) + + def test_allocate_ids_non_empty(self): + from google.cloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + before_key_pbs = [ + self._make_key_pb(PROJECT, id_=None), + self._make_key_pb(PROJECT, id_=None), + ] + after_key_pbs = [ + self._make_key_pb(PROJECT), + self._make_key_pb(PROJECT, id_=2345), + ] + rsp_pb = datastore_pb2.AllocateIdsResponse() + rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) + rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':allocateIds', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs), + after_key_pbs) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.AllocateIdsRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(len(request.keys), len(before_key_pbs)) + for key_before, key_after in zip(before_key_pbs, request.keys): + self.assertEqual(key_before, key_after) + + +class Test__parse_commit_response(unittest.TestCase): + + def _callFUT(self, commit_response_pb): + from google.cloud.datastore.connection import _parse_commit_response + return _parse_commit_response(commit_response_pb) + + def test_it(self): + from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.datastore._generated import entity_pb2 + + index_updates = 1337 + keys = [ + entity_pb2.Key( + path=[ + entity_pb2.Key.PathElement( + kind='Foo', + id=1234, + ), 
+ ], + ), + entity_pb2.Key( + path=[ + entity_pb2.Key.PathElement( + kind='Bar', + name='baz', + ), + ], + ), + ] + response = datastore_pb2.CommitResponse( + mutation_results=[ + datastore_pb2.MutationResult(key=key) for key in keys + ], + index_updates=index_updates, + ) + result = self._callFUT(response) + self.assertEqual(result, (index_updates, keys)) + + +class Http(object): + + _called_with = None + + def __init__(self, headers, content): + from httplib2 import Response + self._response = Response(headers) + self._content = content + + def request(self, **kw): + self._called_with = kw + return self._response, self._content + + +class _Connection(object): + + host = None + USER_AGENT = 'you-sir-age-int' + + def __init__(self, api_url): + self.api_url = api_url + self.build_kwargs = [] + + def build_api_url(self, **kwargs): + self.build_kwargs.append(kwargs) + return self.api_url + + +class _GRPCStub(object): + + def __init__(self, return_val=None, side_effect=Exception): + self.return_val = return_val + self.side_effect = side_effect + self.method_calls = [] + + def _method(self, request_pb, name): + self.method_calls.append((request_pb, name)) + return self.return_val + + def Lookup(self, request_pb): + return self._method(request_pb, 'Lookup') + + def RunQuery(self, request_pb): + return self._method(request_pb, 'RunQuery') + + def BeginTransaction(self, request_pb): + return self._method(request_pb, 'BeginTransaction') + + def Commit(self, request_pb): + result = self._method(request_pb, 'Commit') + if self.side_effect is Exception: + return result + else: + raise self.side_effect + + def Rollback(self, request_pb): + return self._method(request_pb, 'Rollback') + + def AllocateIds(self, request_pb): + return self._method(request_pb, 'AllocateIds') + + +class _RequestPB(object): + project_id = None diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/unit_tests/test_entity.py new file mode 100644 index 000000000000..3a5674a85cd4 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_entity.py @@ -0,0 +1,211 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
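+# Editorial sketch (reviewer comment, not part of the original change):
+# Entity is a dict subclass keyed by property name, so the tests below
+# exercise it with plain mapping operations, roughly:
+#
+#     key = Key('KIND', 1234, project='PROJECT')
+#     entity = Entity(key=key, exclude_from_indexes=('foo',))
+#     entity['foo'] = 42
+#
+# Equality is structural: key, properties, index exclusions, and the
+# private _meanings bookkeeping all have to match.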
+ +import unittest + +_PROJECT = 'PROJECT' +_KIND = 'KIND' +_ID = 1234 + + +class TestEntity(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.entity import Entity + return Entity + + def _makeOne(self, key=None, exclude_from_indexes=()): + klass = self._getTargetClass() + return klass(key=key, exclude_from_indexes=exclude_from_indexes) + + def test_ctor_defaults(self): + klass = self._getTargetClass() + entity = klass() + self.assertIsNone(entity.key) + self.assertIsNone(entity.kind) + self.assertEqual(sorted(entity.exclude_from_indexes), []) + + def test_ctor_explicit(self): + _EXCLUDE_FROM_INDEXES = ['foo', 'bar'] + key = _Key() + entity = self._makeOne( + key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES) + self.assertEqual(sorted(entity.exclude_from_indexes), + sorted(_EXCLUDE_FROM_INDEXES)) + + def test_ctor_bad_exclude_from_indexes(self): + BAD_EXCLUDE_FROM_INDEXES = object() + key = _Key() + self.assertRaises(TypeError, self._makeOne, key=key, + exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES) + + def test___eq_____ne___w_non_entity(self): + from google.cloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) + entity = self._makeOne(key=key) + self.assertFalse(entity == object()) + self.assertTrue(entity != object()) + + def test___eq_____ne___w_different_keys(self): + from google.cloud.datastore.key import Key + _ID1 = 1234 + _ID2 = 2345 + key1 = Key(_KIND, _ID1, project=_PROJECT) + entity1 = self._makeOne(key=key1) + key2 = Key(_KIND, _ID2, project=_PROJECT) + entity2 = self._makeOne(key=key2) + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test___eq_____ne___w_same_keys(self): + from google.cloud.datastore.key import Key + + name = 'foo' + value = 42 + meaning = 9 + + key1 = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key1, exclude_from_indexes=(name,)) + entity1[name] = value + entity1._meanings[name] = (meaning, value) + + key2 = Key(_KIND, _ID, project=_PROJECT) + entity2 = self._makeOne(key=key2, exclude_from_indexes=(name,)) + entity2[name] = value + entity2._meanings[name] = (meaning, value) + + self.assertTrue(entity1 == entity2) + self.assertFalse(entity1 != entity2) + + def test___eq_____ne___w_same_keys_different_props(self): + from google.cloud.datastore.key import Key + key1 = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key1) + entity1['foo'] = 'Foo' + key2 = Key(_KIND, _ID, project=_PROJECT) + entity2 = self._makeOne(key=key2) + entity1['bar'] = 'Bar' + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): + from google.cloud.datastore.key import Key + key1 = Key(_KIND, _ID, project=_PROJECT) + key2 = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key1) + entity1['some_key'] = key1 + entity2 = self._makeOne(key=key1) + entity2['some_key'] = key2 + self.assertTrue(entity1 == entity2) + self.assertFalse(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): + from google.cloud.datastore.key import Key + _ID1 = 1234 + _ID2 = 2345 + key1 = Key(_KIND, _ID1, project=_PROJECT) + key2 = Key(_KIND, _ID2, project=_PROJECT) + entity1 = self._makeOne(key=key1) + entity1['some_key'] = key1 + entity2 = self._makeOne(key=key1) + entity2['some_key'] = key2 + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): + 
from google.cloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key) + sub1 = self._makeOne() + sub1.update({'foo': 'Foo'}) + entity1['some_entity'] = sub1 + entity2 = self._makeOne(key=key) + sub2 = self._makeOne() + sub2.update({'foo': 'Foo'}) + entity2['some_entity'] = sub2 + self.assertTrue(entity1 == entity2) + self.assertFalse(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self): + from google.cloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key) + sub1 = self._makeOne() + sub1.update({'foo': 'Foo'}) + entity1['some_entity'] = sub1 + entity2 = self._makeOne(key=key) + sub2 = self._makeOne() + sub2.update({'foo': 'Bar'}) + entity2['some_entity'] = sub2 + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test__eq__same_value_different_exclude(self): + from google.cloud.datastore.key import Key + + name = 'foo' + value = 42 + key = Key(_KIND, _ID, project=_PROJECT) + + entity1 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity1[name] = value + + entity2 = self._makeOne(key=key, exclude_from_indexes=()) + entity2[name] = value + + self.assertFalse(entity1 == entity2) + + def test__eq__same_value_different_meanings(self): + from google.cloud.datastore.key import Key + + name = 'foo' + value = 42 + meaning = 9 + key = Key(_KIND, _ID, project=_PROJECT) + + entity1 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity1[name] = value + + entity2 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity2[name] = value + entity2._meanings[name] = (meaning, value) + + self.assertFalse(entity1 == entity2) + + def test___repr___no_key_empty(self): + entity = self._makeOne() + self.assertEqual(repr(entity), '<Entity {}>') + + def test___repr___w_key_non_empty(self): + key = _Key() + key._path = '/bar/baz' + entity = self._makeOne(key=key) + entity['foo'] = 'Foo' + self.assertEqual(repr(entity), "<Entity/bar/baz {'foo': 'Foo'}>") + + +class _Key(object): + _MARKER = object() + _key = 'KEY' + _partial = False + _path = None + _id = None + _stored = None + + def __init__(self, project=_PROJECT): + self.project = project + + @property + def path(self): + return self._path diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py new file mode 100644 index 000000000000..5d6a90eed0ff --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -0,0 +1,926 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
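+# Editorial sketch (reviewer comment, not part of the original change):
+# the helpers under test form a round trip between user-space objects
+# and the generated protobufs, roughly:
+#
+#     entity_pb = entity_to_protobuf(entity)    # Entity -> entity_pb2.Entity
+#     entity = entity_from_protobuf(entity_pb)  # and back again
+#     value_pb = _new_value_pb(entity_pb, 'prop')  # named Value slot
+#
+# test_inverts_to_protobuf below pins down that round-trip property.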
+ +import unittest + + +class Test__new_value_pb(unittest.TestCase): + + def _callFUT(self, entity_pb, name): + from google.cloud.datastore.helpers import _new_value_pb + return _new_value_pb(entity_pb, name) + + def test_it(self): + from google.cloud.datastore._generated import entity_pb2 + + entity_pb = entity_pb2.Entity() + name = 'foo' + result = self._callFUT(entity_pb, name) + + self.assertIsInstance(result, entity_pb2.Value) + self.assertEqual(len(entity_pb.properties), 1) + self.assertEqual(entity_pb.properties[name], result) + + +class Test__property_tuples(unittest.TestCase): + + def _callFUT(self, entity_pb): + from google.cloud.datastore.helpers import _property_tuples + return _property_tuples(entity_pb) + + def test_it(self): + import types + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + name1 = 'foo' + name2 = 'bar' + val_pb1 = _new_value_pb(entity_pb, name1) + val_pb2 = _new_value_pb(entity_pb, name2) + + result = self._callFUT(entity_pb) + self.assertIsInstance(result, types.GeneratorType) + self.assertEqual(sorted(result), + sorted([(name1, val_pb1), (name2, val_pb2)])) + + +class Test_entity_from_protobuf(unittest.TestCase): + + def _callFUT(self, val): + from google.cloud.datastore.helpers import entity_from_protobuf + return entity_from_protobuf(val) + + def test_it(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb.key.path.add(kind=_KIND, id=_ID) + + value_pb = _new_value_pb(entity_pb, 'foo') + value_pb.string_value = 'Foo' + + unindexed_val_pb = _new_value_pb(entity_pb, 'bar') + unindexed_val_pb.integer_value = 10 + unindexed_val_pb.exclude_from_indexes = True + + array_val_pb1 = _new_value_pb(entity_pb, 'baz') + array_pb1 = array_val_pb1.array_value.values + + unindexed_array_val_pb = array_pb1.add() + unindexed_array_val_pb.integer_value = 11 + unindexed_array_val_pb.exclude_from_indexes = True + + array_val_pb2 = _new_value_pb(entity_pb, 'qux') + array_pb2 = array_val_pb2.array_value.values + + indexed_array_val_pb = array_pb2.add() + indexed_array_val_pb.integer_value = 12 + + entity = self._callFUT(entity_pb) + self.assertEqual(entity.kind, _KIND) + self.assertEqual(entity.exclude_from_indexes, + frozenset(['bar', 'baz'])) + entity_props = dict(entity) + self.assertEqual(entity_props, + {'foo': 'Foo', 'bar': 10, 'baz': [11], 'qux': [12]}) + + # Also check the key. 
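+ # Editorial note (not part of the original change): the assertions
+ # above show how index exclusions survive the conversion -- a top-level
+ # value_pb.exclude_from_indexes marks the property itself ('bar'),
+ # while the flag on an element of an array_value marks the whole list
+ # property ('baz'); 'qux', whose array element is indexed, stays out
+ # of entity.exclude_from_indexes. The key checks continue below.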
+ key = entity.key + self.assertEqual(key.project, _PROJECT) + self.assertIsNone(key.namespace) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.id, _ID) + + def test_mismatched_value_indexed(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb.key.path.add(kind=_KIND, id=_ID) + + array_val_pb = _new_value_pb(entity_pb, 'baz') + array_pb = array_val_pb.array_value.values + + unindexed_value_pb1 = array_pb.add() + unindexed_value_pb1.integer_value = 10 + unindexed_value_pb1.exclude_from_indexes = True + + unindexed_value_pb2 = array_pb.add() + unindexed_value_pb2.integer_value = 11 + + with self.assertRaises(ValueError): + self._callFUT(entity_pb) + + def test_entity_no_key(self): + from google.cloud.datastore._generated import entity_pb2 + + entity_pb = entity_pb2.Entity() + entity = self._callFUT(entity_pb) + + self.assertIsNone(entity.key) + self.assertEqual(dict(entity), {}) + + def test_entity_with_meaning(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + name = 'hello' + value_pb = _new_value_pb(entity_pb, name) + value_pb.meaning = meaning = 9 + value_pb.string_value = val = u'something' + + entity = self._callFUT(entity_pb) + self.assertIsNone(entity.key) + self.assertEqual(dict(entity), {name: val}) + self.assertEqual(entity._meanings, {name: (meaning, val)}) + + def test_nested_entity_no_key(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + PROJECT = 'FOO' + KIND = 'KIND' + INSIDE_NAME = 'IFOO' + OUTSIDE_NAME = 'OBAR' + INSIDE_VALUE = 1337 + + entity_inside = entity_pb2.Entity() + inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME) + inside_val_pb.integer_value = INSIDE_VALUE + + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = PROJECT + element = entity_pb.key.path.add() + element.kind = KIND + + outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME) + outside_val_pb.entity_value.CopyFrom(entity_inside) + + entity = self._callFUT(entity_pb) + self.assertEqual(entity.key.project, PROJECT) + self.assertEqual(entity.key.flat_path, (KIND,)) + self.assertEqual(len(entity), 1) + + inside_entity = entity[OUTSIDE_NAME] + self.assertIsNone(inside_entity.key) + self.assertEqual(len(inside_entity), 1) + self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE) + + +class Test_entity_to_protobuf(unittest.TestCase): + + def _callFUT(self, entity): + from google.cloud.datastore.helpers import entity_to_protobuf + return entity_to_protobuf(entity) + + def _compareEntityProto(self, entity_pb1, entity_pb2): + from google.cloud.datastore.helpers import _property_tuples + + self.assertEqual(entity_pb1.key, entity_pb2.key) + value_list1 = sorted(_property_tuples(entity_pb1)) + value_list2 = sorted(_property_tuples(entity_pb2)) + self.assertEqual(len(value_list1), len(value_list2)) + for pair1, pair2 in zip(value_list1, value_list2): + name1, val1 = pair1 + name2, val2 = pair2 + self.assertEqual(name1, name2) + if val1.HasField('entity_value'): # Message field (Entity) + self.assertEqual(val1.meaning, val2.meaning) + self._compareEntityProto(val1.entity_value, + val2.entity_value) + else: + self.assertEqual(val1, val2) + + def test_empty(self): + 
from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + + entity = Entity() + entity_pb = self._callFUT(entity) + self._compareEntityProto(entity_pb, entity_pb2.Entity()) + + def test_key_only(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.key import Key + + kind, name = 'PATH', 'NAME' + project = 'PROJECT' + key = Key(kind, name, project=project) + entity = Entity(key=key) + entity_pb = self._callFUT(entity) + + expected_pb = entity_pb2.Entity() + expected_pb.key.partition_id.project_id = project + path_elt = expected_pb.key.path.add() + path_elt.kind = kind + path_elt.name = name + + self._compareEntityProto(entity_pb, expected_pb) + + def test_simple_fields(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _new_value_pb + + entity = Entity() + name1 = 'foo' + entity[name1] = value1 = 42 + name2 = 'bar' + entity[name2] = value2 = u'some-string' + entity_pb = self._callFUT(entity) + + expected_pb = entity_pb2.Entity() + val_pb1 = _new_value_pb(expected_pb, name1) + val_pb1.integer_value = value1 + val_pb2 = _new_value_pb(expected_pb, name2) + val_pb2.string_value = value2 + + self._compareEntityProto(entity_pb, expected_pb) + + def test_with_empty_list(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + + entity = Entity() + entity['foo'] = [] + entity_pb = self._callFUT(entity) + + self._compareEntityProto(entity_pb, entity_pb2.Entity()) + + def test_inverts_to_protobuf(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + from google.cloud.datastore.helpers import entity_from_protobuf + + original_pb = entity_pb2.Entity() + # Add a key. + original_pb.key.partition_id.project_id = project = 'PROJECT' + elem1 = original_pb.key.path.add() + elem1.kind = 'Family' + elem1.id = 1234 + elem2 = original_pb.key.path.add() + elem2.kind = 'King' + elem2.name = 'Spades' + + # Add an integer property. + val_pb1 = _new_value_pb(original_pb, 'foo') + val_pb1.integer_value = 1337 + val_pb1.exclude_from_indexes = True + # Add a string property. + val_pb2 = _new_value_pb(original_pb, 'bar') + val_pb2.string_value = u'hello' + + # Add a nested (entity) property. + val_pb3 = _new_value_pb(original_pb, 'entity-baz') + sub_pb = entity_pb2.Entity() + sub_val_pb1 = _new_value_pb(sub_pb, 'x') + sub_val_pb1.double_value = 3.14 + sub_val_pb2 = _new_value_pb(sub_pb, 'y') + sub_val_pb2.double_value = 2.718281828 + val_pb3.meaning = 9 + val_pb3.entity_value.CopyFrom(sub_pb) + + # Add a list property. + val_pb4 = _new_value_pb(original_pb, 'list-quux') + array_val1 = val_pb4.array_value.values.add() + array_val1.exclude_from_indexes = False + array_val1.meaning = meaning = 22 + array_val1.blob_value = b'\xe2\x98\x83' + array_val2 = val_pb4.array_value.values.add() + array_val2.exclude_from_indexes = False + array_val2.meaning = meaning + array_val2.blob_value = b'\xe2\x98\x85' + + # Convert to the user-space Entity. + entity = entity_from_protobuf(original_pb) + # Convert the user-space Entity back to a protobuf. + new_pb = self._callFUT(entity) + + # NOTE: entity_to_protobuf() strips the project so we "cheat". 
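+ # Editorial note (not part of the original change): per the NOTE above,
+ # the converted new_pb comes back without partition_id.project_id set,
+ # so the test restores it on the next line to keep the field-by-field
+ # comparison against original_pb honest.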
+ new_pb.key.partition_id.project_id = project + self._compareEntityProto(original_pb, new_pb) + + def test_meaning_with_change(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _new_value_pb + + entity = Entity() + name = 'foo' + entity[name] = value = 42 + entity._meanings[name] = (9, 1337) + entity_pb = self._callFUT(entity) + + expected_pb = entity_pb2.Entity() + value_pb = _new_value_pb(expected_pb, name) + value_pb.integer_value = value + # NOTE: No meaning is used since the value differs from the + # value stored. + self._compareEntityProto(entity_pb, expected_pb) + + def test_variable_meanings(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _new_value_pb + + entity = Entity() + name = 'quux' + entity[name] = values = [1, 20, 300] + meaning = 9 + entity._meanings[name] = ([None, meaning, None], values) + entity_pb = self._callFUT(entity) + + # Construct the expected protobuf. + expected_pb = entity_pb2.Entity() + value_pb = _new_value_pb(expected_pb, name) + value0 = value_pb.array_value.values.add() + value0.integer_value = values[0] + # The only array entry with a meaning is the middle one. + value1 = value_pb.array_value.values.add() + value1.integer_value = values[1] + value1.meaning = meaning + value2 = value_pb.array_value.values.add() + value2.integer_value = values[2] + + self._compareEntityProto(entity_pb, expected_pb) + + +class Test_key_from_protobuf(unittest.TestCase): + + def _callFUT(self, val): + from google.cloud.datastore.helpers import key_from_protobuf + + return key_from_protobuf(val) + + def _makePB(self, project=None, namespace=None, path=()): + from google.cloud.datastore._generated import entity_pb2 + pb = entity_pb2.Key() + if project is not None: + pb.partition_id.project_id = project + if namespace is not None: + pb.partition_id.namespace_id = namespace + for elem in path: + added = pb.path.add() + added.kind = elem['kind'] + if 'id' in elem: + added.id = elem['id'] + if 'name' in elem: + added.name = elem['name'] + return pb + + def test_wo_namespace_in_pb(self): + _PROJECT = 'PROJECT' + pb = self._makePB(path=[{'kind': 'KIND'}], project=_PROJECT) + key = self._callFUT(pb) + self.assertEqual(key.project, _PROJECT) + self.assertIsNone(key.namespace) + + def test_w_namespace_in_pb(self): + _PROJECT = 'PROJECT' + _NAMESPACE = 'NAMESPACE' + pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE, + project=_PROJECT) + key = self._callFUT(pb) + self.assertEqual(key.project, _PROJECT) + self.assertEqual(key.namespace, _NAMESPACE) + + def test_w_nested_path_in_pb(self): + _PATH = [ + {'kind': 'PARENT', 'name': 'NAME'}, + {'kind': 'CHILD', 'id': 1234}, + {'kind': 'GRANDCHILD', 'id': 5678}, + ] + pb = self._makePB(path=_PATH, project='PROJECT') + key = self._callFUT(pb) + self.assertEqual(key.path, _PATH) + + def test_w_nothing_in_pb(self): + pb = self._makePB() + self.assertRaises(ValueError, self._callFUT, pb) + + +class Test__pb_attr_value(unittest.TestCase): + + def _callFUT(self, val): + from google.cloud.datastore.helpers import _pb_attr_value + + return _pb_attr_value(val) + + def test_datetime_naive(self): + import calendar + import datetime + from google.cloud._helpers import UTC + + micros = 4375 + naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros) # No zone. 
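+ # Editorial note (not part of the original change): _pb_attr_value()
+ # maps a Python value to the matching Value-pb field name and payload,
+ # as the tests in this class pin down, e.g.
+ #
+ #     _pb_attr_value(42)     -> ('integer_value', 42)
+ #     _pb_attr_value(u'str') -> ('string_value', u'str')
+ #     _pb_attr_value(naive)  -> ('timestamp_value', <Timestamp pb>)
+ #
+ # and, as the equal 'utc' fixture below demonstrates, a naive datetime
+ # is treated as if it were UTC.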
+ utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + name, value = self._callFUT(naive) + self.assertEqual(name, 'timestamp_value') + self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) + self.assertEqual(value.nanos, 1000 * micros) + + def test_datetime_w_zone(self): + import calendar + import datetime + from google.cloud._helpers import UTC + + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + name, value = self._callFUT(utc) + self.assertEqual(name, 'timestamp_value') + self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) + self.assertEqual(value.nanos, 1000 * micros) + + def test_key(self): + from google.cloud.datastore.key import Key + + key = Key('PATH', 1234, project='PROJECT') + name, value = self._callFUT(key) + self.assertEqual(name, 'key_value') + self.assertEqual(value, key.to_protobuf()) + + def test_bool(self): + name, value = self._callFUT(False) + self.assertEqual(name, 'boolean_value') + self.assertEqual(value, False) + + def test_float(self): + name, value = self._callFUT(3.1415926) + self.assertEqual(name, 'double_value') + self.assertEqual(value, 3.1415926) + + def test_int(self): + name, value = self._callFUT(42) + self.assertEqual(name, 'integer_value') + self.assertEqual(value, 42) + + def test_long(self): + must_be_long = (1 << 63) - 1 + name, value = self._callFUT(must_be_long) + self.assertEqual(name, 'integer_value') + self.assertEqual(value, must_be_long) + + def test_native_str(self): + import six + name, value = self._callFUT('str') + if six.PY2: + self.assertEqual(name, 'blob_value') + else: # pragma: NO COVER Python 3 + self.assertEqual(name, 'string_value') + self.assertEqual(value, 'str') + + def test_bytes(self): + name, value = self._callFUT(b'bytes') + self.assertEqual(name, 'blob_value') + self.assertEqual(value, b'bytes') + + def test_unicode(self): + name, value = self._callFUT(u'str') + self.assertEqual(name, 'string_value') + self.assertEqual(value, u'str') + + def test_entity(self): + from google.cloud.datastore.entity import Entity + entity = Entity() + name, value = self._callFUT(entity) + self.assertEqual(name, 'entity_value') + self.assertIs(value, entity) + + def test_array(self): + values = ['a', 0, 3.14] + name, value = self._callFUT(values) + self.assertEqual(name, 'array_value') + self.assertIs(value, values) + + def test_geo_point(self): + from google.type import latlng_pb2 + from google.cloud.datastore.helpers import GeoPoint + + lat = 42.42 + lng = 99.0007 + geo_pt = GeoPoint(latitude=lat, longitude=lng) + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + name, value = self._callFUT(geo_pt) + self.assertEqual(name, 'geo_point_value') + self.assertEqual(value, geo_pt_pb) + + def test_null(self): + from google.protobuf import struct_pb2 + + name, value = self._callFUT(None) + self.assertEqual(name, 'null_value') + self.assertEqual(value, struct_pb2.NULL_VALUE) + + def test_object(self): + self.assertRaises(ValueError, self._callFUT, object()) + + +class Test__get_value_from_value_pb(unittest.TestCase): + + def _callFUT(self, pb): + from google.cloud.datastore.helpers import _get_value_from_value_pb + + return _get_value_from_value_pb(pb) + + def _makePB(self, attr_name, value): + from google.cloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value() + setattr(pb, attr_name, value) + return pb + + def test_datetime(self): + import calendar + import datetime + from google.cloud._helpers import UTC + from google.cloud.datastore._generated import 
entity_pb2 + + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + pb = entity_pb2.Value() + pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) + pb.timestamp_value.nanos = 1000 * micros + self.assertEqual(self._callFUT(pb), utc) + + def test_key(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.key import Key + + pb = entity_pb2.Value() + expected = Key('KIND', 1234, project='PROJECT').to_protobuf() + pb.key_value.CopyFrom(expected) + found = self._callFUT(pb) + self.assertEqual(found.to_protobuf(), expected) + + def test_bool(self): + pb = self._makePB('boolean_value', False) + self.assertEqual(self._callFUT(pb), False) + + def test_float(self): + pb = self._makePB('double_value', 3.1415926) + self.assertEqual(self._callFUT(pb), 3.1415926) + + def test_int(self): + pb = self._makePB('integer_value', 42) + self.assertEqual(self._callFUT(pb), 42) + + def test_bytes(self): + pb = self._makePB('blob_value', b'str') + self.assertEqual(self._callFUT(pb), b'str') + + def test_unicode(self): + pb = self._makePB('string_value', u'str') + self.assertEqual(self._callFUT(pb), u'str') + + def test_entity(self): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _new_value_pb + + pb = entity_pb2.Value() + entity_pb = pb.entity_value + entity_pb.key.path.add(kind='KIND') + entity_pb.key.partition_id.project_id = 'PROJECT' + + value_pb = _new_value_pb(entity_pb, 'foo') + value_pb.string_value = 'Foo' + entity = self._callFUT(pb) + self.assertIsInstance(entity, Entity) + self.assertEqual(entity['foo'], 'Foo') + + def test_array(self): + from google.cloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value() + array_pb = pb.array_value.values + item_pb = array_pb.add() + item_pb.string_value = 'Foo' + item_pb = array_pb.add() + item_pb.string_value = 'Bar' + items = self._callFUT(pb) + self.assertEqual(items, ['Foo', 'Bar']) + + def test_geo_point(self): + from google.type import latlng_pb2 + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore.helpers import GeoPoint + + lat = -3.14 + lng = 13.37 + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + pb = entity_pb2.Value(geo_point_value=geo_pt_pb) + result = self._callFUT(pb) + self.assertIsInstance(result, GeoPoint) + self.assertEqual(result.latitude, lat) + self.assertEqual(result.longitude, lng) + + def test_null(self): + from google.protobuf import struct_pb2 + from google.cloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) + result = self._callFUT(pb) + self.assertIsNone(result) + + def test_unknown(self): + from google.cloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value() + with self.assertRaises(ValueError): + self._callFUT(pb) + + +class Test_set_protobuf_value(unittest.TestCase): + + def _callFUT(self, value_pb, val): + from google.cloud.datastore.helpers import _set_protobuf_value + + return _set_protobuf_value(value_pb, val) + + def _makePB(self): + from google.cloud.datastore._generated import entity_pb2 + return entity_pb2.Value() + + def test_datetime(self): + import calendar + import datetime + from google.cloud._helpers import UTC + + pb = self._makePB() + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + self._callFUT(pb, utc) + value = pb.timestamp_value + self.assertEqual(value.seconds, 
calendar.timegm(utc.timetuple())) + self.assertEqual(value.nanos, 1000 * micros) + + def test_key(self): + from google.cloud.datastore.key import Key + + pb = self._makePB() + key = Key('KIND', 1234, project='PROJECT') + self._callFUT(pb, key) + value = pb.key_value + self.assertEqual(value, key.to_protobuf()) + + def test_none(self): + pb = self._makePB() + self._callFUT(pb, None) + self.assertEqual(pb.WhichOneof('value_type'), 'null_value') + + def test_bool(self): + pb = self._makePB() + self._callFUT(pb, False) + value = pb.boolean_value + self.assertEqual(value, False) + + def test_float(self): + pb = self._makePB() + self._callFUT(pb, 3.1415926) + value = pb.double_value + self.assertEqual(value, 3.1415926) + + def test_int(self): + pb = self._makePB() + self._callFUT(pb, 42) + value = pb.integer_value + self.assertEqual(value, 42) + + def test_long(self): + pb = self._makePB() + must_be_long = (1 << 63) - 1 + self._callFUT(pb, must_be_long) + value = pb.integer_value + self.assertEqual(value, must_be_long) + + def test_native_str(self): + import six + pb = self._makePB() + self._callFUT(pb, 'str') + if six.PY2: + value = pb.blob_value + else: # pragma: NO COVER Python 3 + value = pb.string_value + self.assertEqual(value, 'str') + + def test_bytes(self): + pb = self._makePB() + self._callFUT(pb, b'str') + value = pb.blob_value + self.assertEqual(value, b'str') + + def test_unicode(self): + pb = self._makePB() + self._callFUT(pb, u'str') + value = pb.string_value + self.assertEqual(value, u'str') + + def test_entity_empty_wo_key(self): + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _property_tuples + + pb = self._makePB() + entity = Entity() + self._callFUT(pb, entity) + value = pb.entity_value + self.assertEqual(value.key.SerializeToString(), b'') + self.assertEqual(len(list(_property_tuples(value))), 0) + + def test_entity_w_key(self): + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _property_tuples + from google.cloud.datastore.key import Key + + name = 'foo' + value = u'Foo' + pb = self._makePB() + key = Key('KIND', 123, project='PROJECT') + entity = Entity(key=key) + entity[name] = value + self._callFUT(pb, entity) + entity_pb = pb.entity_value + self.assertEqual(entity_pb.key, key.to_protobuf()) + + prop_dict = dict(_property_tuples(entity_pb)) + self.assertEqual(len(prop_dict), 1) + self.assertEqual(list(prop_dict.keys()), [name]) + self.assertEqual(prop_dict[name].string_value, value) + + def test_array(self): + pb = self._makePB() + values = [u'a', 0, 3.14] + self._callFUT(pb, values) + marshalled = pb.array_value.values + self.assertEqual(len(marshalled), len(values)) + self.assertEqual(marshalled[0].string_value, values[0]) + self.assertEqual(marshalled[1].integer_value, values[1]) + self.assertEqual(marshalled[2].double_value, values[2]) + + def test_geo_point(self): + from google.type import latlng_pb2 + from google.cloud.datastore.helpers import GeoPoint + + pb = self._makePB() + lat = 9.11 + lng = 3.337 + geo_pt = GeoPoint(latitude=lat, longitude=lng) + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + self._callFUT(pb, geo_pt) + self.assertEqual(pb.geo_point_value, geo_pt_pb) + + +class Test__get_meaning(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud.datastore.helpers import _get_meaning + return _get_meaning(*args, **kwargs) + + def test_no_meaning(self): + from google.cloud.datastore._generated import entity_pb2 + + value_pb = 
entity_pb2.Value() + result = self._callFUT(value_pb) + self.assertIsNone(result) + + def test_single(self): + from google.cloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + value_pb.meaning = meaning = 22 + value_pb.string_value = u'hi' + result = self._callFUT(value_pb) + self.assertEqual(meaning, result) + + def test_empty_array_value(self): + from google.cloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + value_pb.array_value.values.add() + value_pb.array_value.values.pop() + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(None, result) + + def test_array_value(self): + from google.cloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + meaning = 9 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + sub_value_pb1.meaning = sub_value_pb2.meaning = meaning + sub_value_pb1.string_value = u'hi' + sub_value_pb2.string_value = u'bye' + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(meaning, result) + + def test_array_value_multiple_meanings(self): + from google.cloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + meaning1 = 9 + meaning2 = 10 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + sub_value_pb1.meaning = meaning1 + sub_value_pb2.meaning = meaning2 + sub_value_pb1.string_value = u'hi' + sub_value_pb2.string_value = u'bye' + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(result, [meaning1, meaning2]) + + def test_array_value_meaning_partially_unset(self): + from google.cloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + meaning1 = 9 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + sub_value_pb1.meaning = meaning1 + sub_value_pb1.string_value = u'hi' + sub_value_pb2.string_value = u'bye' + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(result, [meaning1, None]) + + +class TestGeoPoint(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.helpers import GeoPoint + return GeoPoint + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + lat = 81.2 + lng = 359.9999 + geo_pt = self._makeOne(lat, lng) + self.assertEqual(geo_pt.latitude, lat) + self.assertEqual(geo_pt.longitude, lng) + + def test_to_protobuf(self): + from google.type import latlng_pb2 + + lat = 0.0001 + lng = 20.03 + geo_pt = self._makeOne(lat, lng) + result = geo_pt.to_protobuf() + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + self.assertEqual(result, geo_pt_pb) + + def test___eq__(self): + lat = 0.0001 + lng = 20.03 + geo_pt1 = self._makeOne(lat, lng) + geo_pt2 = self._makeOne(lat, lng) + self.assertEqual(geo_pt1, geo_pt2) + + def test___eq__type_differ(self): + lat = 0.0001 + lng = 20.03 + geo_pt1 = self._makeOne(lat, lng) + geo_pt2 = object() + self.assertNotEqual(geo_pt1, geo_pt2) + + def test___ne__same_value(self): + lat = 0.0001 + lng = 20.03 + geo_pt1 = self._makeOne(lat, lng) + geo_pt2 = self._makeOne(lat, lng) + comparison_val = (geo_pt1 != geo_pt2) + self.assertFalse(comparison_val) + + def test___ne__(self): + geo_pt1 = self._makeOne(0.0, 1.0) + geo_pt2 = self._makeOne(2.0, 3.0) + self.assertNotEqual(geo_pt1, geo_pt2) diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py 
b/packages/google-cloud-datastore/unit_tests/test_key.py new file mode 100644 index 000000000000..22aaefb27656 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -0,0 +1,431 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestKey(unittest.TestCase): + + _DEFAULT_PROJECT = 'PROJECT' + + def _getTargetClass(self): + from google.cloud.datastore.key import Key + return Key + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_empty(self): + self.assertRaises(ValueError, self._makeOne) + + def test_ctor_no_project(self): + klass = self._getTargetClass() + self.assertRaises(ValueError, klass, 'KIND') + + def test_ctor_w_explicit_project_empty_path(self): + _PROJECT = 'PROJECT' + self.assertRaises(ValueError, self._makeOne, project=_PROJECT) + + def test_ctor_parent(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PROJECT = 'PROJECT-ALT' + _PARENT_NAMESPACE = 'NAMESPACE' + _CHILD_KIND = 'KIND2' + _CHILD_ID = 2345 + _PATH = [ + {'kind': _PARENT_KIND, 'id': _PARENT_ID}, + {'kind': _CHILD_KIND, 'id': _CHILD_ID}, + ] + parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, + project=_PARENT_PROJECT, + namespace=_PARENT_NAMESPACE) + key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + self.assertEqual(key.project, parent_key.project) + self.assertEqual(key.namespace, parent_key.namespace) + self.assertEqual(key.kind, _CHILD_KIND) + self.assertEqual(key.path, _PATH) + self.assertIs(key.parent, parent_key) + + def test_ctor_partial_parent(self): + parent_key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key) + + def test_ctor_parent_bad_type(self): + with self.assertRaises(AttributeError): + self._makeOne('KIND2', 1234, parent=('KIND1', 1234), + project=self._DEFAULT_PROJECT) + + def test_ctor_parent_bad_namespace(self): + parent_key = self._makeOne('KIND', 1234, namespace='FOO', + project=self._DEFAULT_PROJECT) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key, + project=self._DEFAULT_PROJECT) + + def test_ctor_parent_bad_project(self): + parent_key = self._makeOne('KIND', 1234, project='FOO') + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key, + project='BAR') + + def test_ctor_parent_empty_path(self): + parent_key = self._makeOne('KIND', 1234, + project=self._DEFAULT_PROJECT) + with self.assertRaises(ValueError): + self._makeOne(parent=parent_key) + + def test_ctor_explicit(self): + _PROJECT = 'PROJECT-ALT' + _NAMESPACE = 'NAMESPACE' + _KIND = 'KIND' + _ID = 1234 + _PATH = [{'kind': _KIND, 'id': _ID}] + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + project=_PROJECT) + self.assertEqual(key.project, _PROJECT) + self.assertEqual(key.namespace, _NAMESPACE) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.path, _PATH) + + def 
test_ctor_bad_kind(self): + self.assertRaises(ValueError, self._makeOne, object(), + project=self._DEFAULT_PROJECT) + + def test_ctor_bad_id_or_name(self): + self.assertRaises(ValueError, self._makeOne, 'KIND', object(), + project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, self._makeOne, 'KIND', None, + project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None, + project=self._DEFAULT_PROJECT) + + def test__clone(self): + _PROJECT = 'PROJECT-ALT' + _NAMESPACE = 'NAMESPACE' + _KIND = 'KIND' + _ID = 1234 + _PATH = [{'kind': _KIND, 'id': _ID}] + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + project=_PROJECT) + clone = key._clone() + self.assertEqual(clone.project, _PROJECT) + self.assertEqual(clone.namespace, _NAMESPACE) + self.assertEqual(clone.kind, _KIND) + self.assertEqual(clone.path, _PATH) + + def test__clone_with_parent(self): + _PROJECT = 'PROJECT-ALT' + _NAMESPACE = 'NAMESPACE' + _KIND1 = 'PARENT' + _KIND2 = 'KIND' + _ID1 = 1234 + _ID2 = 2345 + _PATH = [{'kind': _KIND1, 'id': _ID1}, {'kind': _KIND2, 'id': _ID2}] + + parent = self._makeOne(_KIND1, _ID1, namespace=_NAMESPACE, + project=_PROJECT) + key = self._makeOne(_KIND2, _ID2, parent=parent) + self.assertIs(key.parent, parent) + clone = key._clone() + self.assertIs(clone.parent, key.parent) + self.assertEqual(clone.project, _PROJECT) + self.assertEqual(clone.namespace, _NAMESPACE) + self.assertEqual(clone.path, _PATH) + + def test___eq_____ne___w_non_key(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME = 'one' + key = self._makeOne(_KIND, _NAME, project=_PROJECT) + self.assertFalse(key == object()) + self.assertTrue(key != object()) + + def test___eq_____ne___two_incomplete_keys_same_kind(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + key1 = self._makeOne(_KIND, project=_PROJECT) + key2 = self._makeOne(_KIND, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, project=_PROJECT) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT) + key2 = self._makeOne(_KIND, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_different_ids(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID1 = 1234 + _ID2 = 2345 + key1 = self._makeOne(_KIND, _ID1, project=_PROJECT) + key2 = self._makeOne(_KIND, _ID2, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_id(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT) + self.assertTrue(key1 == key2) + self.assertFalse(key1 != key2) + + def test___eq_____ne___same_kind_and_id_different_project(self): + _PROJECT1 = 'PROJECT1' + _PROJECT2 = 'PROJECT2' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT1) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_id_different_namespace(self): + _PROJECT = 'PROJECT' + _NAMESPACE1 = 
'NAMESPACE1' + _NAMESPACE2 = 'NAMESPACE2' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT, + namespace=_NAMESPACE1) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT, + namespace=_NAMESPACE2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_different_names(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME1 = 'one' + _NAME2 = 'two' + key1 = self._makeOne(_KIND, _NAME1, project=_PROJECT) + key2 = self._makeOne(_KIND, _NAME2, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_name(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME = 'one' + key1 = self._makeOne(_KIND, _NAME, project=_PROJECT) + key2 = self._makeOne(_KIND, _NAME, project=_PROJECT) + self.assertTrue(key1 == key2) + self.assertFalse(key1 != key2) + + def test___eq_____ne___same_kind_and_name_different_project(self): + _PROJECT1 = 'PROJECT1' + _PROJECT2 = 'PROJECT2' + _KIND = 'KIND' + _NAME = 'one' + key1 = self._makeOne(_KIND, _NAME, project=_PROJECT1) + key2 = self._makeOne(_KIND, _NAME, project=_PROJECT2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_name_different_namespace(self): + _PROJECT = 'PROJECT' + _NAMESPACE1 = 'NAMESPACE1' + _NAMESPACE2 = 'NAMESPACE2' + _KIND = 'KIND' + _NAME = 'one' + key1 = self._makeOne(_KIND, _NAME, project=_PROJECT, + namespace=_NAMESPACE1) + key2 = self._makeOne(_KIND, _NAME, project=_PROJECT, + namespace=_NAMESPACE2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___hash___incomplete(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + key = self._makeOne(_KIND, project=_PROJECT) + self.assertNotEqual(hash(key), + hash(_KIND) + hash(_PROJECT) + hash(None)) + + def test___hash___completed_w_id(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key = self._makeOne(_KIND, _ID, project=_PROJECT) + self.assertNotEqual(hash(key), + hash(_KIND) + hash(_ID) + + hash(_PROJECT) + hash(None)) + + def test___hash___completed_w_name(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME = 'NAME' + key = self._makeOne(_KIND, _NAME, project=_PROJECT) + self.assertNotEqual(hash(key), + hash(_KIND) + hash(_NAME) + + hash(_PROJECT) + hash(None)) + + def test_completed_key_on_partial_w_id(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + _ID = 1234 + new_key = key.completed_key(_ID) + self.assertIsNot(key, new_key) + self.assertEqual(new_key.id, _ID) + self.assertIsNone(new_key.name) + + def test_completed_key_on_partial_w_name(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + _NAME = 'NAME' + new_key = key.completed_key(_NAME) + self.assertIsNot(key, new_key) + self.assertIsNone(new_key.id) + self.assertEqual(new_key.name, _NAME) + + def test_completed_key_on_partial_w_invalid(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, key.completed_key, object()) + + def test_completed_key_on_complete(self): + key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, key.completed_key, 5678) + + def test_to_protobuf_defaults(self): + from google.cloud.datastore._generated import entity_pb2 + + _KIND = 'KIND' + key = self._makeOne(_KIND, project=self._DEFAULT_PROJECT) + pb = key.to_protobuf() + self.assertIsInstance(pb, entity_pb2.Key) + + # Check partition ID. 
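+ # The partition ID groups the key's project and namespace.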
+ self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT) + # Unset values are False-y. + self.assertEqual(pb.partition_id.namespace_id, '') + + # Check the element PB matches the partial key and kind. + elem, = list(pb.path) + self.assertEqual(elem.kind, _KIND) + # Unset values are False-y. + self.assertEqual(elem.name, '') + # Unset values are False-y. + self.assertEqual(elem.id, 0) + + def test_to_protobuf_w_explicit_project(self): + _PROJECT = 'PROJECT-ALT' + key = self._makeOne('KIND', project=_PROJECT) + pb = key.to_protobuf() + self.assertEqual(pb.partition_id.project_id, _PROJECT) + + def test_to_protobuf_w_explicit_namespace(self): + _NAMESPACE = 'NAMESPACE' + key = self._makeOne('KIND', namespace=_NAMESPACE, + project=self._DEFAULT_PROJECT) + pb = key.to_protobuf() + self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE) + + def test_to_protobuf_w_explicit_path(self): + _PARENT = 'PARENT' + _CHILD = 'CHILD' + _ID = 1234 + _NAME = 'NAME' + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID, + project=self._DEFAULT_PROJECT) + pb = key.to_protobuf() + elems = list(pb.path) + self.assertEqual(len(elems), 2) + self.assertEqual(elems[0].kind, _PARENT) + self.assertEqual(elems[0].name, _NAME) + self.assertEqual(elems[1].kind, _CHILD) + self.assertEqual(elems[1].id, _ID) + + def test_to_protobuf_w_no_kind(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + # Force the 'kind' to be unset. Maybe `to_protobuf` should fail + # on this? The backend certainly will. + key._path[-1].pop('kind') + pb = key.to_protobuf() + # Unset values are False-y. + self.assertEqual(pb.path[0].kind, '') + + def test_is_partial_no_name_or_id(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertTrue(key.is_partial) + + def test_is_partial_w_id(self): + _ID = 1234 + key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT) + self.assertFalse(key.is_partial) + + def test_is_partial_w_name(self): + _NAME = 'NAME' + key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT) + self.assertFalse(key.is_partial) + + def test_id_or_name_no_name_or_id(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertIsNone(key.id_or_name) + + def test_id_or_name_no_name_or_id_child(self): + key = self._makeOne('KIND1', 1234, 'KIND2', + project=self._DEFAULT_PROJECT) + self.assertIsNone(key.id_or_name) + + def test_id_or_name_w_id_only(self): + _ID = 1234 + key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, _ID) + + def test_id_or_name_w_name_only(self): + _NAME = 'NAME' + key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, _NAME) + + def test_parent_default(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertIsNone(key.parent) + + def test_parent_explicit_top_level(self): + key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT) + self.assertIsNone(key.parent) + + def test_parent_explicit_nested(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + project=self._DEFAULT_PROJECT) + self.assertEqual(key.parent.path, _PARENT_PATH) + + def test_parent_multiple_calls(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + project=self._DEFAULT_PROJECT) + parent = key.parent + 
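# Key.parent is computed on first access and cached, so + # subsequent accesses return the identical object. +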
self.assertEqual(parent.path, _PARENT_PATH) + new_parent = key.parent + self.assertIs(parent, new_parent) diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py new file mode 100644 index 000000000000..0fa333a83e7c --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -0,0 +1,759 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestQuery(unittest.TestCase): + + _PROJECT = 'PROJECT' + + def _getTargetClass(self): + from google.cloud.datastore.query import Query + return Query + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeClient(self, connection=None): + if connection is None: + connection = _Connection() + return _Client(self._PROJECT, connection) + + def test_ctor_defaults(self): + client = self._makeClient() + query = self._makeOne(client) + self.assertIs(query._client, client) + self.assertEqual(query.project, client.project) + self.assertIsNone(query.kind) + self.assertEqual(query.namespace, client.namespace) + self.assertIsNone(query.ancestor) + self.assertEqual(query.filters, []) + self.assertEqual(query.projection, []) + self.assertEqual(query.order, []) + self.assertEqual(query.distinct_on, []) + + def test_ctor_explicit(self): + from google.cloud.datastore.key import Key + _PROJECT = 'OTHER_PROJECT' + _KIND = 'KIND' + _NAMESPACE = 'OTHER_NAMESPACE' + client = self._makeClient() + ancestor = Key('ANCESTOR', 123, project=_PROJECT) + FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)] + PROJECTION = ['foo', 'bar', 'baz'] + ORDER = ['foo', 'bar'] + DISTINCT_ON = ['foo'] + query = self._makeOne( + client, + kind=_KIND, + project=_PROJECT, + namespace=_NAMESPACE, + ancestor=ancestor, + filters=FILTERS, + projection=PROJECTION, + order=ORDER, + distinct_on=DISTINCT_ON, + ) + self.assertIs(query._client, client) + self.assertEqual(query.project, _PROJECT) + self.assertEqual(query.kind, _KIND) + self.assertEqual(query.namespace, _NAMESPACE) + self.assertEqual(query.ancestor.path, ancestor.path) + self.assertEqual(query.filters, FILTERS) + self.assertEqual(query.projection, PROJECTION) + self.assertEqual(query.order, ORDER) + self.assertEqual(query.distinct_on, DISTINCT_ON) + + def test_ctor_bad_projection(self): + BAD_PROJECTION = object() + self.assertRaises(TypeError, self._makeOne, self._makeClient(), + projection=BAD_PROJECTION) + + def test_ctor_bad_order(self): + BAD_ORDER = object() + self.assertRaises(TypeError, self._makeOne, self._makeClient(), + order=BAD_ORDER) + + def test_ctor_bad_distinct_on(self): + BAD_DISTINCT_ON = object() + self.assertRaises(TypeError, self._makeOne, self._makeClient(), + distinct_on=BAD_DISTINCT_ON) + + def test_ctor_bad_filters(self): + FILTERS_CANT_UNPACK = [('one', 'two')] + self.assertRaises(ValueError, self._makeOne, self._makeClient(), + filters=FILTERS_CANT_UNPACK) + + def test_namespace_setter_w_non_string(self): + query = 
self._makeOne(self._makeClient()) + + def _assign(val): + query.namespace = val + + self.assertRaises(ValueError, _assign, object()) + + def test_namespace_setter(self): + _NAMESPACE = 'OTHER_NAMESPACE' + query = self._makeOne(self._makeClient()) + query.namespace = _NAMESPACE + self.assertEqual(query.namespace, _NAMESPACE) + + def test_kind_setter_w_non_string(self): + query = self._makeOne(self._makeClient()) + + def _assign(val): + query.kind = val + + self.assertRaises(TypeError, _assign, object()) + + def test_kind_setter_wo_existing(self): + _KIND = 'KIND' + query = self._makeOne(self._makeClient()) + query.kind = _KIND + self.assertEqual(query.kind, _KIND) + + def test_kind_setter_w_existing(self): + _KIND_BEFORE = 'KIND_BEFORE' + _KIND_AFTER = 'KIND_AFTER' + query = self._makeOne(self._makeClient(), kind=_KIND_BEFORE) + self.assertEqual(query.kind, _KIND_BEFORE) + query.kind = _KIND_AFTER + self.assertEqual(query.project, self._PROJECT) + self.assertEqual(query.kind, _KIND_AFTER) + + def test_ancestor_setter_w_non_key(self): + query = self._makeOne(self._makeClient()) + + def _assign(val): + query.ancestor = val + + self.assertRaises(TypeError, _assign, object()) + self.assertRaises(TypeError, _assign, ['KIND', 'NAME']) + + def test_ancestor_setter_w_key(self): + from google.cloud.datastore.key import Key + _NAME = u'NAME' + key = Key('KIND', 123, project=self._PROJECT) + query = self._makeOne(self._makeClient()) + query.add_filter('name', '=', _NAME) + query.ancestor = key + self.assertEqual(query.ancestor.path, key.path) + + def test_ancestor_deleter_w_key(self): + from google.cloud.datastore.key import Key + key = Key('KIND', 123, project=self._PROJECT) + query = self._makeOne(client=self._makeClient(), ancestor=key) + del query.ancestor + self.assertIsNone(query.ancestor) + + def test_add_filter_setter_w_unknown_operator(self): + query = self._makeOne(self._makeClient()) + self.assertRaises(ValueError, query.add_filter, + 'firstname', '~~', 'John') + + def test_add_filter_w_known_operator(self): + query = self._makeOne(self._makeClient()) + query.add_filter('firstname', '=', u'John') + self.assertEqual(query.filters, [('firstname', '=', u'John')]) + + def test_add_filter_w_all_operators(self): + query = self._makeOne(self._makeClient()) + query.add_filter('leq_prop', '<=', u'val1') + query.add_filter('geq_prop', '>=', u'val2') + query.add_filter('lt_prop', '<', u'val3') + query.add_filter('gt_prop', '>', u'val4') + query.add_filter('eq_prop', '=', u'val5') + self.assertEqual(len(query.filters), 5) + self.assertEqual(query.filters[0], ('leq_prop', '<=', u'val1')) + self.assertEqual(query.filters[1], ('geq_prop', '>=', u'val2')) + self.assertEqual(query.filters[2], ('lt_prop', '<', u'val3')) + self.assertEqual(query.filters[3], ('gt_prop', '>', u'val4')) + self.assertEqual(query.filters[4], ('eq_prop', '=', u'val5')) + + def test_add_filter_w_known_operator_and_entity(self): + from google.cloud.datastore.entity import Entity + query = self._makeOne(self._makeClient()) + other = Entity() + other['firstname'] = u'John' + other['lastname'] = u'Smith' + query.add_filter('other', '=', other) + self.assertEqual(query.filters, [('other', '=', other)]) + + def test_add_filter_w_whitespace_property_name(self): + query = self._makeOne(self._makeClient()) + PROPERTY_NAME = ' property with lots of space ' + query.add_filter(PROPERTY_NAME, '=', u'John') + self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')]) + + def test_add_filter___key__valid_key(self): + from 
google.cloud.datastore.key import Key + query = self._makeOne(self._makeClient()) + key = Key('Foo', project=self._PROJECT) + query.add_filter('__key__', '=', key) + self.assertEqual(query.filters, [('__key__', '=', key)]) + + def test_filter___key__not_equal_operator(self): + from google.cloud.datastore.key import Key + key = Key('Foo', project=self._PROJECT) + query = self._makeOne(self._makeClient()) + query.add_filter('__key__', '<', key) + self.assertEqual(query.filters, [('__key__', '<', key)]) + + def test_filter___key__invalid_value(self): + query = self._makeOne(self._makeClient()) + self.assertRaises(ValueError, query.add_filter, '__key__', '=', None) + + def test_projection_setter_empty(self): + query = self._makeOne(self._makeClient()) + query.projection = [] + self.assertEqual(query.projection, []) + + def test_projection_setter_string(self): + query = self._makeOne(self._makeClient()) + query.projection = 'field1' + self.assertEqual(query.projection, ['field1']) + + def test_projection_setter_non_empty(self): + query = self._makeOne(self._makeClient()) + query.projection = ['field1', 'field2'] + self.assertEqual(query.projection, ['field1', 'field2']) + + def test_projection_setter_multiple_calls(self): + _PROJECTION1 = ['field1', 'field2'] + _PROJECTION2 = ['field3'] + query = self._makeOne(self._makeClient()) + query.projection = _PROJECTION1 + self.assertEqual(query.projection, _PROJECTION1) + query.projection = _PROJECTION2 + self.assertEqual(query.projection, _PROJECTION2) + + def test_keys_only(self): + query = self._makeOne(self._makeClient()) + query.keys_only() + self.assertEqual(query.projection, ['__key__']) + + def test_key_filter_defaults(self): + from google.cloud.datastore.key import Key + + client = self._makeClient() + query = self._makeOne(client) + self.assertEqual(query.filters, []) + key = Key('Kind', 1234, project='project') + query.key_filter(key) + self.assertEqual(query.filters, [('__key__', '=', key)]) + + def test_key_filter_explicit(self): + from google.cloud.datastore.key import Key + + client = self._makeClient() + query = self._makeOne(client) + self.assertEqual(query.filters, []) + key = Key('Kind', 1234, project='project') + query.key_filter(key, operator='>') + self.assertEqual(query.filters, [('__key__', '>', key)]) + + def test_order_setter_empty(self): + query = self._makeOne(self._makeClient(), order=['foo', '-bar']) + query.order = [] + self.assertEqual(query.order, []) + + def test_order_setter_string(self): + query = self._makeOne(self._makeClient()) + query.order = 'field' + self.assertEqual(query.order, ['field']) + + def test_order_setter_single_item_list_desc(self): + query = self._makeOne(self._makeClient()) + query.order = ['-field'] + self.assertEqual(query.order, ['-field']) + + def test_order_setter_multiple(self): + query = self._makeOne(self._makeClient()) + query.order = ['foo', '-bar'] + self.assertEqual(query.order, ['foo', '-bar']) + + def test_distinct_on_setter_empty(self): + query = self._makeOne(self._makeClient(), distinct_on=['foo', 'bar']) + query.distinct_on = [] + self.assertEqual(query.distinct_on, []) + + def test_distinct_on_setter_string(self): + query = self._makeOne(self._makeClient()) + query.distinct_on = 'field1' + self.assertEqual(query.distinct_on, ['field1']) + + def test_distinct_on_setter_non_empty(self): + query = self._makeOne(self._makeClient()) + query.distinct_on = ['field1', 'field2'] + self.assertEqual(query.distinct_on, ['field1', 'field2']) + + def test_distinct_on_multiple_calls(self): + 
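# Each assignment replaces the previous distinct_on list outright. +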
_DISTINCT_ON1 = ['field1', 'field2'] + _DISTINCT_ON2 = ['field3'] + query = self._makeOne(self._makeClient()) + query.distinct_on = _DISTINCT_ON1 + self.assertEqual(query.distinct_on, _DISTINCT_ON1) + query.distinct_on = _DISTINCT_ON2 + self.assertEqual(query.distinct_on, _DISTINCT_ON2) + + def test_fetch_defaults_w_client_attr(self): + connection = _Connection() + client = self._makeClient(connection) + query = self._makeOne(client) + iterator = query.fetch() + self.assertIs(iterator._query, query) + self.assertIs(iterator._client, client) + self.assertIsNone(iterator._limit) + self.assertEqual(iterator._offset, 0) + + def test_fetch_w_explicit_client(self): + connection = _Connection() + client = self._makeClient(connection) + other_client = self._makeClient(connection) + query = self._makeOne(client) + iterator = query.fetch(limit=7, offset=8, client=other_client) + self.assertIs(iterator._query, query) + self.assertIs(iterator._client, other_client) + self.assertEqual(iterator._limit, 7) + self.assertEqual(iterator._offset, 8) + + +class TestIterator(unittest.TestCase): + _PROJECT = 'PROJECT' + _NAMESPACE = 'NAMESPACE' + _KIND = 'KIND' + _ID = 123 + _START = b'\x00' + _END = b'\xFF' + + def _getTargetClass(self): + from google.cloud.datastore.query import Iterator + return Iterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _addQueryResults(self, connection, cursor=_END, more=False, + skipped_results=None, no_entity=False): + from google.cloud.datastore._generated import entity_pb2 + from google.cloud.datastore._generated import query_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + if more: + more_enum = query_pb2.QueryResultBatch.NOT_FINISHED + else: + more_enum = query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT + _ID = 123 + if no_entity: + entities = [] + else: + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = self._PROJECT + path_element = entity_pb.key.path.add() + path_element.kind = self._KIND + path_element.id = _ID + value_pb = _new_value_pb(entity_pb, 'foo') + value_pb.string_value = u'Foo' + entities = [entity_pb] + + connection._results.append( + (entities, cursor, more_enum, skipped_results)) + + def _makeClient(self, connection=None): + if connection is None: + connection = _Connection() + return _Client(self._PROJECT, connection) + + def test_ctor_defaults(self): + connection = _Connection() + query = object() + iterator = self._makeOne(query, connection) + self.assertIs(iterator._query, query) + self.assertIsNone(iterator._limit) + self.assertIsNone(iterator._offset) + self.assertIsNone(iterator._skipped_results) + + def test_ctor_explicit(self): + client = self._makeClient() + query = _Query(client) + iterator = self._makeOne(query, client, 13, 29) + self.assertIs(iterator._query, query) + self.assertEqual(iterator._limit, 13) + self.assertEqual(iterator._offset, 29) + + def test_next_page_no_cursors_no_more(self): + from google.cloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=b'') + iterator = self._makeOne(query, client) + entities, more_results, cursor = iterator.next_page() + self.assertIsNone(iterator._skipped_results) + + self.assertIsNone(cursor) + self.assertFalse(more_results) + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 1) + 
self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.offset = 0 + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test_next_page_no_cursors_no_more_w_offset_and_limit(self): + from google.cloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + skipped_results = object() + self._addQueryResults(connection, cursor=b'', + skipped_results=skipped_results) + iterator = self._makeOne(query, client, 13, 29) + entities, more_results, cursor = iterator.next_page() + + self.assertIsNone(cursor) + self.assertFalse(more_results) + self.assertFalse(iterator._more_results) + self.assertEqual(iterator._skipped_results, skipped_results) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.limit.value = 13 + qpb.offset = 29 + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test_next_page_w_cursors_w_more(self): + from base64 import urlsafe_b64decode + from base64 import urlsafe_b64encode + from google.cloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + iterator = self._makeOne(query, client) + iterator._start_cursor = self._START + iterator._end_cursor = self._END + entities, more_results, cursor = iterator.next_page() + + self.assertEqual(cursor, urlsafe_b64encode(self._END)) + self.assertTrue(more_results) + self.assertTrue(iterator._more_results) + self.assertIsNone(iterator._skipped_results) + self.assertIsNone(iterator._end_cursor) + self.assertEqual(urlsafe_b64decode(iterator._start_cursor), self._END) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.offset = 0 + qpb.start_cursor = urlsafe_b64decode(self._START) + qpb.end_cursor = urlsafe_b64decode(self._END) + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test_next_page_w_cursors_w_bogus_more(self): + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + epb, cursor, _, _ = connection._results.pop() + connection._results.append((epb, cursor, 5, None)) # invalid enum + iterator = self._makeOne(query, client) + self.assertRaises(ValueError, iterator.next_page) + + def test___iter___no_more(self): + from google.cloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection) + iterator = self._makeOne(query, client) + 
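# Draining the iterator should issue exactly one run_query call. +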
entities = list(iterator) + + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.offset = 0 + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test___iter___w_more(self): + from google.cloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + self._addQueryResults(connection) + iterator = self._makeOne(query, client) + entities = list(iterator) + + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 2) + for entity in entities: + self.assertEqual( + entity.key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[1]['foo'], u'Foo') + qpb1 = _pb_from_query(query) + qpb2 = _pb_from_query(query) + qpb2.start_cursor = self._END + EXPECTED1 = { + 'project': self._PROJECT, + 'query_pb': qpb1, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + EXPECTED2 = { + 'project': self._PROJECT, + 'query_pb': qpb2, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(len(connection._called_with), 2) + self.assertEqual(connection._called_with[0], EXPECTED1) + self.assertEqual(connection._called_with[1], EXPECTED2) + + def test___iter___w_limit(self): + from google.cloud.datastore.query import _pb_from_query + + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + skip1 = 4 + skip2 = 9 + self._addQueryResults(connection, more=True, skipped_results=skip1, + no_entity=True) + self._addQueryResults(connection, more=True, skipped_results=skip2) + self._addQueryResults(connection) + offset = skip1 + skip2 + iterator = self._makeOne(query, client, limit=2, offset=offset) + entities = list(iterator) + + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 2) + for entity in entities: + self.assertEqual( + entity.key.path, + [{'kind': self._KIND, 'id': self._ID}]) + qpb1 = _pb_from_query(query) + qpb1.limit.value = 2 + qpb1.offset = offset + qpb2 = _pb_from_query(query) + qpb2.start_cursor = self._END + qpb2.limit.value = 2 + qpb2.offset = offset - skip1 + qpb3 = _pb_from_query(query) + qpb3.start_cursor = self._END + qpb3.limit.value = 1 + EXPECTED1 = { + 'project': self._PROJECT, + 'query_pb': qpb1, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + EXPECTED2 = { + 'project': self._PROJECT, + 'query_pb': qpb2, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + EXPECTED3 = { + 'project': self._PROJECT, + 'query_pb': qpb3, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(len(connection._called_with), 3) + self.assertEqual(connection._called_with[0], EXPECTED1) + self.assertEqual(connection._called_with[1], EXPECTED2) + self.assertEqual(connection._called_with[2], EXPECTED3) + + +class Test__pb_from_query(unittest.TestCase): + + def _callFUT(self, query): + from google.cloud.datastore.query import _pb_from_query + return _pb_from_query(query) + + def test_empty(self): + from google.cloud.datastore._generated import query_pb2 + + pb = self._callFUT(_Query()) + 
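# A bare query should translate to an all-defaults Query protobuf. +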
self.assertEqual(list(pb.projection), []) + self.assertEqual(list(pb.kind), []) + self.assertEqual(list(pb.order), []) + self.assertEqual(list(pb.distinct_on), []) + self.assertEqual(pb.filter.property_filter.property.name, '') + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, + query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED) + self.assertEqual(list(cfilter.filters), []) + self.assertEqual(pb.start_cursor, b'') + self.assertEqual(pb.end_cursor, b'') + self.assertEqual(pb.limit.value, 0) + self.assertEqual(pb.offset, 0) + + def test_projection(self): + pb = self._callFUT(_Query(projection=['a', 'b', 'c'])) + self.assertEqual([item.property.name for item in pb.projection], + ['a', 'b', 'c']) + + def test_kind(self): + pb = self._callFUT(_Query(kind='KIND')) + self.assertEqual([item.name for item in pb.kind], ['KIND']) + + def test_ancestor(self): + from google.cloud.datastore.key import Key + from google.cloud.datastore._generated import query_pb2 + + ancestor = Key('Ancestor', 123, project='PROJECT') + pb = self._callFUT(_Query(ancestor=ancestor)) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) + self.assertEqual(len(cfilter.filters), 1) + pfilter = cfilter.filters[0].property_filter + self.assertEqual(pfilter.property.name, '__key__') + ancestor_pb = ancestor.to_protobuf() + self.assertEqual(pfilter.value.key_value, ancestor_pb) + + def test_filter(self): + from google.cloud.datastore._generated import query_pb2 + + query = _Query(filters=[('name', '=', u'John')]) + query.OPERATORS = { + '=': query_pb2.PropertyFilter.EQUAL, + } + pb = self._callFUT(query) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) + self.assertEqual(len(cfilter.filters), 1) + pfilter = cfilter.filters[0].property_filter + self.assertEqual(pfilter.property.name, 'name') + self.assertEqual(pfilter.value.string_value, u'John') + + def test_filter_key(self): + from google.cloud.datastore.key import Key + from google.cloud.datastore._generated import query_pb2 + + key = Key('Kind', 123, project='PROJECT') + query = _Query(filters=[('__key__', '=', key)]) + query.OPERATORS = { + '=': query_pb2.PropertyFilter.EQUAL, + } + pb = self._callFUT(query) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) + self.assertEqual(len(cfilter.filters), 1) + pfilter = cfilter.filters[0].property_filter + self.assertEqual(pfilter.property.name, '__key__') + key_pb = key.to_protobuf() + self.assertEqual(pfilter.value.key_value, key_pb) + + def test_order(self): + from google.cloud.datastore._generated import query_pb2 + + pb = self._callFUT(_Query(order=['a', '-b', 'c'])) + self.assertEqual([item.property.name for item in pb.order], + ['a', 'b', 'c']) + self.assertEqual([item.direction for item in pb.order], + [query_pb2.PropertyOrder.ASCENDING, + query_pb2.PropertyOrder.DESCENDING, + query_pb2.PropertyOrder.ASCENDING]) + + def test_distinct_on(self): + pb = self._callFUT(_Query(distinct_on=['a', 'b', 'c'])) + self.assertEqual([item.name for item in pb.distinct_on], + ['a', 'b', 'c']) + + +class _Query(object): + + def __init__(self, + client=object(), + kind=None, + project=None, + namespace=None, + ancestor=None, + filters=(), + projection=(), + order=(), + distinct_on=()): + self._client = client + self.kind = kind + self.project = project + self.namespace = namespace + self.ancestor = ancestor + self.filters = filters + self.projection = projection + self.order = 
order + self.distinct_on = distinct_on + + +class _Connection(object): + + _called_with = None + _cursor = b'\x00' + _skipped = 0 + + def __init__(self): + self._results = [] + self._called_with = [] + + def run_query(self, **kw): + self._called_with.append(kw) + result, self._results = self._results[0], self._results[1:] + return result + + +class _Client(object): + + def __init__(self, project, connection, namespace=None): + self.project = project + self.connection = connection + self.namespace = namespace + + @property + def current_transaction(self): + pass diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py new file mode 100644 index 000000000000..8b28ee0cb277 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -0,0 +1,256 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestTransaction(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.transaction import Transaction + return Transaction + + def _makeOne(self, client, **kw): + return self._getTargetClass()(client, **kw) + + def test_ctor_defaults(self): + from google.cloud.datastore._generated import datastore_pb2 + + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + self.assertEqual(xact.project, _PROJECT) + self.assertEqual(xact.connection, connection) + self.assertIsNone(xact.id) + self.assertEqual(xact._status, self._getTargetClass()._INITIAL) + self.assertIsInstance(xact._commit_request, + datastore_pb2.CommitRequest) + self.assertIs(xact.mutations, xact._commit_request.mutations) + self.assertEqual(len(xact._partial_key_entities), 0) + + def test_current(self): + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + xact1 = self._makeOne(client) + xact2 = self._makeOne(client) + self.assertIsNone(xact1.current()) + self.assertIsNone(xact2.current()) + with xact1: + self.assertIs(xact1.current(), xact1) + self.assertIs(xact2.current(), xact1) + with _NoCommitBatch(client): + self.assertIsNone(xact1.current()) + self.assertIsNone(xact2.current()) + with xact2: + self.assertIs(xact1.current(), xact2) + self.assertIs(xact2.current(), xact2) + with _NoCommitBatch(client): + self.assertIsNone(xact1.current()) + self.assertIsNone(xact2.current()) + self.assertIs(xact1.current(), xact1) + self.assertIs(xact2.current(), xact1) + self.assertIsNone(xact1.current()) + self.assertIsNone(xact2.current()) + + def test_begin(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + + def test_begin_tombstoned(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() 
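+ # The first begin() succeeds; after rollback() the transaction is + # tombstoned and a second begin() must raise ValueError.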
+ self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + + xact.rollback() + self.assertIsNone(xact.id) + + self.assertRaises(ValueError, xact.begin) + + def test_begin_w_begin_transaction_failure(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + + connection._side_effect = RuntimeError + with self.assertRaises(RuntimeError): + xact.begin() + + self.assertIsNone(xact.id) + self.assertEqual(connection._begun, _PROJECT) + + def test_rollback(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() + xact.rollback() + self.assertIsNone(xact.id) + self.assertEqual(connection._rolled_back, (_PROJECT, 234)) + + def test_commit_no_partial_keys(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact._commit_request = commit_request = object() + xact.begin() + xact.commit() + self.assertEqual(connection._committed, + (_PROJECT, commit_request, 234)) + self.assertIsNone(xact.id) + + def test_commit_w_partial_keys(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 123 + connection = _Connection(234) + connection._completed_keys = [_make_key(_KIND, _ID, _PROJECT)] + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() + entity = _Entity() + xact.put(entity) + xact._commit_request = commit_request = object() + xact.commit() + self.assertEqual(connection._committed, + (_PROJECT, commit_request, 234)) + self.assertIsNone(xact.id) + self.assertEqual(entity.key.path, [{'kind': _KIND, 'id': _ID}]) + + def test_context_manager_no_raise(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact._commit_request = commit_request = object() + with xact: + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + self.assertEqual(connection._committed, + (_PROJECT, commit_request, 234)) + self.assertIsNone(xact.id) + + def test_context_manager_w_raise(self): + + class Foo(Exception): + pass + + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact._mutation = object() + try: + with xact: + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + raise Foo() + except Foo: + self.assertIsNone(xact.id) + self.assertEqual(connection._rolled_back, (_PROJECT, 234)) + self.assertIsNone(connection._committed) + self.assertIsNone(xact.id) + + +def _make_key(kind, id_, project): + from google.cloud.datastore._generated import entity_pb2 + + key = entity_pb2.Key() + key.partition_id.project_id = project + elem = key.path.add() + elem.kind = kind + elem.id = id_ + return key + + +class _Connection(object): + _marker = object() + _begun = None + _rolled_back = None + _committed = None + _side_effect = None + + def __init__(self, xact_id=123): + self._xact_id = xact_id + self._completed_keys = [] + self._index_updates = 0 + + def begin_transaction(self, project): + self._begun = project + if self._side_effect is None: + return self._xact_id + else: + raise self._side_effect + + def rollback(self, project, transaction_id): + self._rolled_back = project, transaction_id + + def commit(self, project, commit_request, transaction_id): + self._committed = (project, commit_request, transaction_id) + return 
self._index_updates, self._completed_keys + + +class _Entity(dict): + + def __init__(self): + super(_Entity, self).__init__() + from google.cloud.datastore.key import Key + self.key = Key('KIND', project='PROJECT') + + +class _Client(object): + + def __init__(self, project, connection, namespace=None): + self.project = project + self.connection = connection + self.namespace = namespace + self._batches = [] + + def _push_batch(self, batch): + self._batches.insert(0, batch) + + def _pop_batch(self): + return self._batches.pop(0) + + @property + def current_batch(self): + return self._batches and self._batches[0] or None + + +class _NoCommitBatch(object): + + def __init__(self, client): + from google.cloud.datastore.batch import Batch + self._client = client + self._batch = Batch(client) + + def __enter__(self): + self._client._push_batch(self._batch) + return self._batch + + def __exit__(self, *args): + self._client._pop_batch() From 88cdb949fc1c02cbf21fdae2825ad737a69129f7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:39:21 -0700 Subject: [PATCH 002/611] Making datastore subpackage into a proper package. - Adding README, setup.py, MANIFEST.in, .coveragerc and tox.ini - Adding google-cloud-datastore as a dependency to the umbrella package - Adding the datastore subdirectory into the list of packages for verifying the docs - Incorporating the datastore subdirectory into the umbrella coverage report - Adding the datastore only tox tests to the Travis config - Adding {toxinidir}/../core as a dependency for the datastore tox config --- packages/google-cloud-datastore/.coveragerc | 13 ++++ packages/google-cloud-datastore/MANIFEST.in | 4 ++ packages/google-cloud-datastore/README.rst | 67 ++++++++++++++++++++ packages/google-cloud-datastore/setup.py | 69 +++++++++++++++++++++ packages/google-cloud-datastore/tox.ini | 30 +++++++++ 5 files changed, 183 insertions(+) create mode 100644 packages/google-cloud-datastore/.coveragerc create mode 100644 packages/google-cloud-datastore/MANIFEST.in create mode 100644 packages/google-cloud-datastore/README.rst create mode 100644 packages/google-cloud-datastore/setup.py create mode 100644 packages/google-cloud-datastore/tox.ini diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc new file mode 100644 index 000000000000..08f3fdea2433 --- /dev/null +++ b/packages/google-cloud-datastore/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +omit = + */_generated/*.py +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in new file mode 100644 index 000000000000..cb3a2b9ef4fa --- /dev/null +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -0,0 +1,4 @@ +include README.rst +graft google +graft unit_tests +global-exclude *.pyc diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst new file mode 100644 index 000000000000..b3145186ef59 --- /dev/null +++ b/packages/google-cloud-datastore/README.rst @@ -0,0 +1,67 @@ +Python Client for Google Cloud Datastore +======================================== + + Python idiomatic client for `Google Cloud Datastore`_ + +.. _Google Cloud Datastore: https://cloud.google.com/datastore/docs + +- `Homepage`_ +- `API Documentation`_ + +.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ +.. 
_API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ + +Quick Start +----------- + +:: + + $ pip install --upgrade google-cloud-datastore + +Authentication +-------------- + +With ``google-cloud-python`` we try to make authentication as painless as +possible. Check out the `Authentication section`_ in our documentation to +learn more. You may also find the `authentication document`_ shared by all +the ``google-cloud-*`` libraries to be helpful. + +.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication + +Using the API +------------- + +Google `Cloud Datastore`_ (`Datastore API docs`_) is a fully managed, +schemaless database for storing non-relational data. Cloud Datastore +automatically scales with your users and supports ACID transactions, high +availability of reads and writes, strong consistency for reads and ancestor +queries, and eventual consistency for all other queries. + +.. _Cloud Datastore: https://cloud.google.com/datastore/docs +.. _Datastore API docs: https://cloud.google.com/datastore/docs/ + +See the ``google-cloud-python`` API `datastore documentation`_ to learn how to +interact with the Cloud Datastore using this Client Library. + +.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html + +See the `official Google Cloud Datastore documentation`_ for more details on +how to activate Cloud Datastore for your project. + +.. _official Google Cloud Datastore documentation: https://cloud.google.com/datastore/docs/activate + +.. code:: python + + from google.cloud import datastore + # Create, populate and persist an entity + entity = datastore.Entity(key=datastore.Key('EntityKind')) + entity.update({ + 'foo': u'bar', + 'baz': 1337, + 'qux': False, + }) + # Then query for entities + query = datastore.Query(kind='EntityKind') + for result in query.fetch(): + print(result) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py new file mode 100644 index 000000000000..1067876e13b0 --- /dev/null +++ b/packages/google-cloud-datastore/setup.py @@ -0,0 +1,69 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from setuptools import find_packages +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + +with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: + README = file_obj.read() + +# NOTE: This is duplicated throughout and we should try to +# consolidate. 
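+# SETUP_BASE collects the distribution metadata shared by the +# google-cloud-* packages (author, license, classifiers, ...).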
+SETUP_BASE = { + 'author': 'Google Cloud Platform', + 'author_email': 'jjg+google-cloud-python@google.com', + 'scripts': [], + 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', + 'license': 'Apache 2.0', + 'platforms': 'Posix; MacOS X; Windows', + 'include_package_data': True, + 'zip_safe': False, + 'classifiers': [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Topic :: Internet', + ], +} + + +REQUIREMENTS = [ + 'google-cloud-core', + 'grpcio >= 1.0.0', +] + +setup( + name='google-cloud-datastore', + version='0.20.0dev', + description='Python Client for Google Cloud Datastore', + long_description=README, + namespace_packages=[ + 'google', + 'google.cloud', + ], + packages=find_packages(), + install_requires=REQUIREMENTS, + **SETUP_BASE +) diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini new file mode 100644 index 000000000000..64f0d45463dd --- /dev/null +++ b/packages/google-cloud-datastore/tox.ini @@ -0,0 +1,30 @@ +[tox] +envlist = + py27,py34,py35,cover + +[testing] +deps = + {toxinidir}/../core + pytest +covercmd = + py.test --quiet \ + --cov=google.cloud.datastore \ + --cov=unit_tests \ + --cov-config {toxinidir}/.coveragerc \ + unit_tests + +[testenv] +commands = + py.test --quiet {posargs} unit_tests +deps = + {[testing]deps} + +[testenv:cover] +basepython = + python2.7 +commands = + {[testing]covercmd} +deps = + {[testenv]deps} + coverage + pytest-cov From 83513e0ea020f4882126c0eecc5a2f2263560d63 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 10:20:57 -0700 Subject: [PATCH 003/611] Translating INVALID_ARGUMENT on datastore commit to standard exception. 
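For illustration, the caller-visible effect as a minimal sketch (the project,
kind, and payload below are hypothetical; assumes a configured client and the
gRPC transport). After this change an invalid commit surfaces as the standard
BadRequest exception rather than a raw GrpcRendezvous:

    from google.cloud import datastore
    from google.cloud.exceptions import BadRequest

    client = datastore.Client(project='my-project')  # hypothetical project
    entity = datastore.Entity(key=client.key('Kind', 'name'))
    entity['blob'] = b'x' * (2 * 1024 * 1024)  # exceeds Datastore's size limit

    try:
        client.put(entity)  # issues a commit under the hood
    except BadRequest as exc:
        # Before this patch the raw GrpcRendezvous propagated instead.
        print('Commit rejected: %s' % (exc,))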
--- .../google/cloud/datastore/connection.py | 6 +++++- .../unit_tests/test_connection.py | 13 +++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index ef0eeb066d1c..a339d34f0c18 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -23,6 +23,7 @@ from google.cloud import connection as connection_module from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_HOST +from google.cloud.exceptions import BadRequest from google.cloud.exceptions import Conflict from google.cloud.exceptions import GrpcRendezvous from google.cloud.exceptions import make_exception @@ -313,8 +314,11 @@ def commit(self, project, request_pb): try: return self._stub.Commit(request_pb) except GrpcRendezvous as exc: - if exc.code() == StatusCode.ABORTED: + error_code = exc.code() + if error_code == StatusCode.ABORTED: raise Conflict(exc.details()) + if error_code == StatusCode.INVALID_ARGUMENT: + raise BadRequest(exc.details()) raise def rollback(self, project, request_pb): diff --git a/packages/google-cloud-datastore/unit_tests/test_connection.py b/packages/google-cloud-datastore/unit_tests/test_connection.py index f1030a61c948..6791a59cde76 100644 --- a/packages/google-cloud-datastore/unit_tests/test_connection.py +++ b/packages/google-cloud-datastore/unit_tests/test_connection.py @@ -252,6 +252,19 @@ def test_commit_failure_aborted(self): exc = GrpcRendezvous(exc_state, None, None, None) self._commit_failure_helper(exc, Conflict) + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_commit_failure_invalid_argument(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import BadRequest + from google.cloud.exceptions import GrpcRendezvous + + details = 'Too long content.' + exc_state = _RPCState((), None, None, + StatusCode.INVALID_ARGUMENT, details) + exc = GrpcRendezvous(exc_state, None, None, None) + self._commit_failure_helper(exc, BadRequest) + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') def test_commit_failure_cancelled(self): from grpc import StatusCode From cde2cbe0a128f3a8592aabaa82f00b254b36bbcf Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:53:24 -0700 Subject: [PATCH 004/611] Preparing for a release of all packages. Towards #2441. 
- Updating umbrella README to point at all packages - Putting upper bounds on grpcio in dependencies - Putting lower bounds on all google-cloud-* packages listed as dependencies - Adding `setup.cfg` for universal wheels --- packages/google-cloud-datastore/setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 1067876e13b0..17052a272044 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -50,13 +50,13 @@ REQUIREMENTS = [ - 'google-cloud-core', - 'grpcio >= 1.0.0', + 'google-cloud-core >= 0.20.0', + 'grpcio >= 1.0.0, < 2.0dev', ] setup( name='google-cloud-datastore', - version='0.20.0dev', + version='0.20.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From b8161a792d9a0043c566b968d66928d20f2de891 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:57:43 -0700 Subject: [PATCH 005/611] Adding setup.cfg to all packages. --- packages/google-cloud-datastore/setup.cfg | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 packages/google-cloud-datastore/setup.cfg diff --git a/packages/google-cloud-datastore/setup.cfg b/packages/google-cloud-datastore/setup.cfg new file mode 100644 index 000000000000..2a9acf13daa9 --- /dev/null +++ b/packages/google-cloud-datastore/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 From 1780cf2663788c024793d5be8a0bd8ae92a0401c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Sep 2016 14:13:40 -0400 Subject: [PATCH 006/611] Disable pylint's 'ungrouped-imports' error. We share the 'google' namespace with third-party packages. PEP 8 wants 'local' imports to be separated from 'third-party' imports, which is more important than pylint's attempt to group them by name alone.
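To make the conflict concrete, a condensed sketch of the layout being
defended here (mirroring the imports in helpers.py below; pylint would rather
see the two google.* blocks adjacent because the top-level name matches, even
though they belong to different PEP 8 groups):

    # Third-party imports; they happen to live in the shared 'google'
    # namespace, so they look 'local' to pylint:
    from google.type import latlng_pb2
    import six

    # Imports from this package, kept in their own PEP 8 'local' group:
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.datastore.entity import Entity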
--- .../google-cloud-datastore/google/cloud/datastore/connection.py | 2 -- .../google-cloud-datastore/google/cloud/datastore/helpers.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index a339d34f0c18..c2701c218bef 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -28,7 +28,6 @@ from google.cloud.exceptions import GrpcRendezvous from google.cloud.exceptions import make_exception from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 -# pylint: disable=ungrouped-imports try: from grpc import StatusCode from google.cloud.datastore._generated import datastore_grpc_pb2 @@ -38,7 +37,6 @@ StatusCode = None else: _HAVE_GRPC = True -# pylint: enable=ungrouped-imports DATASTORE_API_HOST = 'datastore.googleapis.com' diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 6bf6bcb1cb92..51ae06ec32c7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -24,13 +24,11 @@ from google.type import latlng_pb2 import six -# pylint: disable=ungrouped-imports from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_datetime from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key -# pylint: enable=ungrouped-imports __all__ = ('entity_from_protobuf', 'key_from_protobuf') From fdf7da0d41fb510b9af62773a27b4171f3b19aee Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 29 Sep 2016 16:17:02 -0700 Subject: [PATCH 007/611] General clean-up after rename. - Removing "graft google" from MANIFEST for umbrella package. It isn't needed since the umbrella package has no source - Updating license year on copy-pasted namespace package __init__.py files. Done via: https://gist.github.com/dhermes/a0e88f891ffffc3ecea5c9bb2f13e4f5 - Removing unused HTML context from docs/conf.py - Setting GH_OWNER and GH_PROJECT_NAME (which together make the REPO_SLUG) manually in the docs update scripts. This way the env. variables don't need to be set in the Travis UI / CLI. Also updating tox.ini to stop passing those variables through - Removing the root package from `verify_included_modules.py` since it no longer has any source - Updating a docstring reference to a moved class in the Bigtable system test - Removing redundant `GOOGLE_CLOUD_*` in `tox` system test `passenv` (already covered by `GOOGLE_*`) --- packages/google-cloud-datastore/google/cloud/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/__init__.py b/packages/google-cloud-datastore/google/cloud/__init__.py index 8ac7b74af136..b2b833373882 100644 --- a/packages/google-cloud-datastore/google/cloud/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
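For reference, the copy-pasted namespace-package __init__.py files mentioned
above conventionally hold just the license header plus boilerplate along
these lines (a sketch of the standard pkg_resources pattern, not necessarily
the verbatim file contents):

    try:
        import pkg_resources
        pkg_resources.declare_namespace(__name__)
    except ImportError:
        import pkgutil
        __path__ = pkgutil.extend_path(__path__, __name__)

This is what lets the 'google' and 'google.cloud' packages be shared across
separately installed distributions, matching the namespace_packages
declaration in setup.py.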
From f022ff7c9efdbdea0da26bb9069125f761107320 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 3 Oct 2016 21:32:48 -0700 Subject: [PATCH 008/611] Updating package READMEs with more useful doc links. Also removing duplicate "Homepage" links (duplicate of "API Documentation" links). --- packages/google-cloud-datastore/README.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index b3145186ef59..56ad3f3c69ce 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -5,11 +5,9 @@ Python Client for Google Cloud Datastore .. _Google Cloud Datastore: https://cloud.google.com/datastore/docs -- `Homepage`_ -- `API Documentation`_ +- `Documentation`_ -.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ -.. _API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html Quick Start ----------- From daaf13d510483fac5851445fc7df85a07f8b23bc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 4 Oct 2016 13:27:31 -0700 Subject: [PATCH 009/611] Catching gRPC error in datastore run_query and converting to our error. Fixes #2494. --- .../google/cloud/datastore/connection.py | 8 +++- .../unit_tests/test_connection.py | 43 ++++++++++++++++++- 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index c2701c218bef..e84457a011ea 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -276,7 +276,13 @@ def run_query(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return self._stub.RunQuery(request_pb) + try: + return self._stub.RunQuery(request_pb) + except GrpcRendezvous as exc: + error_code = exc.code() + if error_code == StatusCode.INVALID_ARGUMENT: + raise BadRequest(exc.details()) + raise def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request.
diff --git a/packages/google-cloud-datastore/unit_tests/test_connection.py b/packages/google-cloud-datastore/unit_tests/test_connection.py index 6791a59cde76..cbafc72ac3f0 100644 --- a/packages/google-cloud-datastore/unit_tests/test_connection.py +++ b/packages/google-cloud-datastore/unit_tests/test_connection.py @@ -200,6 +200,43 @@ def test_run_query(self): self.assertEqual(stub.method_calls, [(request_pb, 'RunQuery')]) + def _run_query_failure_helper(self, exc, err_class): + stub = _GRPCStub(side_effect=exc) + datastore_api = self._makeOne(stub=stub) + + request_pb = _RequestPB() + project = 'PROJECT' + with self.assertRaises(err_class): + datastore_api.run_query(project, request_pb) + + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'RunQuery')]) + + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_run_query_invalid_argument(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import BadRequest + from google.cloud.exceptions import GrpcRendezvous + + details = ('Cannot have inequality filters on multiple ' + 'properties: [created, priority]') + exc_state = _RPCState((), None, None, + StatusCode.INVALID_ARGUMENT, details) + exc = GrpcRendezvous(exc_state, None, None, None) + self._run_query_failure_helper(exc, BadRequest) + + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_run_query_cancelled(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import GrpcRendezvous + + exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) + exc = GrpcRendezvous(exc_state, None, None, None) + self._run_query_failure_helper(exc, GrpcRendezvous) + def test_begin_transaction(self): return_val = object() stub = _GRPCStub(return_val) @@ -1130,7 +1167,11 @@ def Lookup(self, request_pb): return self._method(request_pb, 'Lookup') def RunQuery(self, request_pb): - return self._method(request_pb, 'RunQuery') + result = self._method(request_pb, 'RunQuery') + if self.side_effect is Exception: + return result + else: + raise self.side_effect def BeginTransaction(self, request_pb): return self._method(request_pb, 'BeginTransaction') From 7f8dd270723edb86d695b2b16cbb5dc286768ae9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 19:56:21 -0700 Subject: [PATCH 010/611] Cut 0.20.1 datastore release. --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 17052a272044..d79db705950a 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -56,7 +56,7 @@ setup( name='google-cloud-datastore', - version='0.20.0', + version='0.20.1', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From fa50f6309c7607023d8fb6db5d713299deabcaf3 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:34:03 -0700 Subject: [PATCH 011/611] Replace string with str in rtypes. Used the command: ag -l 'rtype: string' | xargs sed -i .bak 's/rtype: string/rtype: str/g' Based on this comment: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2485#discussion_r83267163 `str` is a type, `string` is a module. 
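A short, runnable illustration of the distinction (works under both
Python 2 and 3):

    import string  # 'string' is a standard-library module of text helpers

    print(string.ascii_lowercase)  # 'abcdefghijklmnopqrstuvwxyz'
    print(isinstance('abc', str))  # True; 'str' is the built-in text type

so ':rtype: str' names a real type, while ':rtype: string' pointed at a
module.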
--- .../google/cloud/datastore/client.py | 2 +- .../google/cloud/datastore/connection.py | 2 +- .../google/cloud/datastore/key.py | 10 +++++----- .../google/cloud/datastore/query.py | 4 ++-- .../google/cloud/datastore/transaction.py | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 5df32cc42970..9b05a0d362f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -53,7 +53,7 @@ def _determine_default_project(project=None): :type project: string :param project: Optional. The project to use as default. - :rtype: string or ``NoneType`` + :rtype: str or ``NoneType`` :returns: Default project if it can be determined. """ if project is None: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index e84457a011ea..836e0389c0e9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -76,7 +76,7 @@ def _request(self, project, method, data): :param data: The data to send with the API call. Typically this is a serialized Protobuf string. - :rtype: string + :rtype: str :returns: The string response content from the API call. :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the response code is not 200 OK. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index be6ca3875f16..aaf2f18a248f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -269,7 +269,7 @@ def is_partial(self): def namespace(self): """Namespace getter. - :rtype: string + :rtype: str :returns: The namespace of the current key. """ return self._namespace @@ -298,7 +298,7 @@ def flat_path(self): def kind(self): """Kind getter. Based on the last element of path. - :rtype: string + :rtype: str :returns: The kind of the current key. """ return self.path[-1]['kind'] @@ -316,7 +316,7 @@ def id(self): def name(self): """Name getter. Based on the last element of path. - :rtype: string + :rtype: str :returns: The (string) name of the key. """ return self.path[-1].get('name') @@ -335,7 +335,7 @@ def id_or_name(self): def project(self): """Project getter. - :rtype: string + :rtype: str :returns: The key's project. """ return self._project @@ -391,7 +391,7 @@ def _validate_project(project, parent): :type parent: :class:`google.cloud.datastore.key.Key` or ``NoneType`` :param parent: The parent of the key or ``None``. - :rtype: string + :rtype: str :returns: The ``project`` passed in, or implied from the environment. :raises: :class:`ValueError` if ``project`` is ``None`` and no project can be inferred from the parent. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 954a320116a5..8c1e510c03b0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -109,7 +109,7 @@ def project(self): def namespace(self): """This query's namespace - :rtype: string or None + :rtype: str or None :returns: the namespace assigned to this query """ return self._namespace or self._client.namespace @@ -128,7 +128,7 @@ def namespace(self, value): def kind(self): """Get the Kind of the Query. - :rtype: string + :rtype: str :returns: The kind for the query. """ return self._kind diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 700068dfe7cd..a053518597ef 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -100,7 +100,7 @@ def __init__(self, client): def id(self): """Getter for the transaction ID. - :rtype: string + :rtype: str :returns: The ID of the current transaction. """ return self._id From 87ce034fcc754514867670cfe4df9a3d35d9d368 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:50:55 -0700 Subject: [PATCH 012/611] Replace types string with str. Uses command: ag -l 'type ([^:]+): string' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): string/type \1: str/g' Note: [-r for gsed (GNU sed) is needed for group matching](http://superuser.com/a/336819/125262). --- .../google/cloud/datastore/client.py | 10 +-- .../google/cloud/datastore/connection.py | 64 +++++++++---------- .../google/cloud/datastore/helpers.py | 4 +- .../google/cloud/datastore/key.py | 4 +- .../google/cloud/datastore/query.py | 22 +++---- 5 files changed, 52 insertions(+), 52 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 9b05a0d362f2..c8e95e6013a5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -50,7 +50,7 @@ def _determine_default_project(project=None): * Google App Engine application ID * Google Compute Engine project ID (from metadata server) - :type project: string + :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` @@ -75,7 +75,7 @@ def _extended_lookup(connection, project, key_pbs, :type connection: :class:`google.cloud.datastore.connection.Connection` :param connection: The connection used to connect to datastore. - :type project: string + :type project: str :param project: The project to make the request for. :type key_pbs: list of :class:`._generated.entity_pb2.Key` @@ -95,7 +95,7 @@ def _extended_lookup(connection, project, key_pbs, consistency. If True, request ``EVENTUAL`` read consistency. - :type transaction_id: string + :type transaction_id: str :param transaction_id: If passed, make the request in the scope of the given transaction. Incompatible with ``eventual==True``. @@ -146,10 +146,10 @@ def _extended_lookup(connection, project, key_pbs, class Client(_BaseClient, _ClientProjectMixin): """Convenience wrapper for invoking APIs/factories w/ a project. 
- :type project: string + :type project: str :param project: (optional) The project to pass to proxied API methods. - :type namespace: string + :type namespace: str :param namespace: (optional) namespace to pass to proxied API methods. :type credentials: :class:`oauth2client.client.OAuth2Credentials` or diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index 836e0389c0e9..5ee84eadf874 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -65,14 +65,14 @@ def __init__(self, connection): def _request(self, project, method, data): """Make a request over the Http transport to the Cloud Datastore API. - :type project: string + :type project: str :param project: The project to make the request for. - :type method: string + :type method: str :param method: The API call method name (ie, ``runQuery``, ``lookup``, etc) - :type data: string + :type data: str :param data: The data to send with the API call. Typically this is a serialized Protobuf string. @@ -100,11 +100,11 @@ def _request(self, project, method, data): def _rpc(self, project, method, request_pb, response_pb_cls): """Make a protobuf RPC request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. - :type method: string + :type method: str :param method: The name of the method to invoke. :type request_pb: :class:`google.protobuf.message.Message` instance @@ -125,7 +125,7 @@ def _rpc(self, project, method, request_pb, response_pb_cls): def lookup(self, project, request_pb): """Perform a ``lookup`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -141,7 +141,7 @@ def lookup(self, project, request_pb): def run_query(self, project, request_pb): """Perform a ``runQuery`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -157,7 +157,7 @@ def run_query(self, project, request_pb): def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -174,7 +174,7 @@ def begin_transaction(self, project, request_pb): def commit(self, project, request_pb): """Perform a ``commit`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -190,7 +190,7 @@ def commit(self, project, request_pb): def rollback(self, project, request_pb): """Perform a ``rollback`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -206,7 +206,7 @@ def rollback(self, project, request_pb): def allocate_ids(self, project, request_pb): """Perform an ``allocateIds`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -249,7 +249,7 @@ def __init__(self, connection, secure): def lookup(self, project, request_pb): """Perform a ``lookup`` request. 
- :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -265,7 +265,7 @@ def lookup(self, project, request_pb): def run_query(self, project, request_pb): """Perform a ``runQuery`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -287,7 +287,7 @@ def run_query(self, project, request_pb): def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -304,7 +304,7 @@ def begin_transaction(self, project, request_pb): def commit(self, project, request_pb): """Perform a ``commit`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -328,7 +328,7 @@ def commit(self, project, request_pb): def rollback(self, project, request_pb): """Perform a ``rollback`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -344,7 +344,7 @@ def rollback(self, project, request_pb): def allocate_ids(self, project, request_pb): """Perform an ``allocateIds`` request. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. @@ -407,18 +407,18 @@ def build_api_url(self, project, method, base_url=None, This method is used internally to come up with the URL to use when making RPCs to the Cloud Datastore API. - :type project: string + :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. - :type method: string + :type method: str :param method: The API method to call (e.g. 'runQuery', 'lookup'). - :type base_url: string + :type base_url: str :param base_url: The base URL where the API lives. You shouldn't have to provide this. - :type api_version: string + :type api_version: str :param api_version: The version of the API to connect to. You shouldn't have to provide this. @@ -453,7 +453,7 @@ def lookup(self, project, key_pbs, >>> connection.lookup('project', [key.to_protobuf()]) [] - :type project: string + :type project: str :param project: The project to look up the keys in. :type key_pbs: list of @@ -465,7 +465,7 @@ def lookup(self, project, key_pbs, consistency. If True, request ``EVENTUAL`` read consistency. - :type transaction_id: string + :type transaction_id: str :param transaction_id: If passed, make the request in the scope of the given transaction. Incompatible with ``eventual==True``. @@ -504,13 +504,13 @@ def run_query(self, project, query_pb, namespace=None, Under the hood, the :class:`google.cloud.datastore.query.Query` class uses this method to fetch data. - :type project: string + :type project: str :param project: The project over which to run the query. :type query_pb: :class:`.datastore._generated.query_pb2.Query` :param query_pb: The Protobuf representing the query to run. - :type namespace: string + :type namespace: str :param namespace: The namespace over which to run the query. :type eventual: bool @@ -518,7 +518,7 @@ def run_query(self, project, query_pb, namespace=None, consistency. If True, request ``EVENTUAL`` read consistency. 
- :type transaction_id: string + :type transaction_id: str :param transaction_id: If passed, make the request in the scope of the given transaction. Incompatible with ``eventual==True``. @@ -548,7 +548,7 @@ def begin_transaction(self, project): Maps the ``DatastoreService.BeginTransaction`` protobuf RPC. - :type project: string + :type project: str :param project: The project to which the transaction applies. :rtype: bytes @@ -563,13 +563,13 @@ def commit(self, project, request, transaction_id): Maps the ``DatastoreService.Commit`` protobuf RPC. - :type project: string + :type project: str :param project: The project to which the transaction applies. :type request: :class:`._generated.datastore_pb2.CommitRequest` :param request: The protobuf with the mutations being committed. - :type transaction_id: string or None + :type transaction_id: str or None :param transaction_id: The transaction ID returned from :meth:`begin_transaction`. Non-transactional batches must pass ``None``. @@ -597,10 +597,10 @@ def rollback(self, project, transaction_id): Maps the ``DatastoreService.Rollback`` protobuf RPC. - :type project: string + :type project: str :param project: The project to which the transaction belongs. - :type transaction_id: string + :type transaction_id: str :param transaction_id: The transaction ID returned from :meth:`begin_transaction`. """ @@ -614,7 +614,7 @@ def allocate_ids(self, project, key_pbs): Maps the ``DatastoreService.AllocateIds`` protobuf RPC. - :type project: string + :type project: str :param project: The project to which the transaction belongs. :type key_pbs: list of diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 51ae06ec32c7..ff105c5c2f9d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -80,7 +80,7 @@ def _new_value_pb(entity_pb, name): :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity` :param entity_pb: An entity protobuf to add a new property to. - :type name: string + :type name: str :param name: The name of the new property. :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Value` @@ -162,7 +162,7 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity to be turned into a protobuf. - :type name: string + :type name: str :param name: The name of the property. :type value: object diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index aaf2f18a248f..7541b65e40fb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -207,7 +207,7 @@ def _clone(self): def completed_key(self, id_or_name): """Creates new key from existing partial key by adding final ID/name. - :type id_or_name: string or integer + :type id_or_name: str or integer :param id_or_name: ID or name to be added to the key. :rtype: :class:`google.cloud.datastore.key.Key` @@ -385,7 +385,7 @@ def _validate_project(project, parent): If ``project`` is unset, attempt to infer the project from the environment. - :type project: string + :type project: str :param project: A project. 
:type parent: :class:`google.cloud.datastore.key.Key` or ``NoneType`` diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 8c1e510c03b0..ab88a35b43f7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -31,14 +31,14 @@ class Query(object): :type client: :class:`google.cloud.datastore.client.Client` :param client: The client used to connect to Datastore. - :type kind: string + :type kind: str :param kind: The kind to query. - :type project: string + :type project: str :param project: The project associated with the query. If not passed, uses the client's value. - :type namespace: string or None + :type namespace: str or None :param namespace: The namespace to which to restrict results. If not passed, uses the client's value. @@ -118,7 +118,7 @@ def namespace(self): def namespace(self, value): """Update the query's namespace. - :type value: string + :type value: str """ if not isinstance(value, str): raise ValueError("Namespace must be a string") @@ -137,7 +137,7 @@ def kind(self): def kind(self, value): """Update the Kind of the Query. - :type value: string + :type value: str :param value: updated kind for the query. .. note:: @@ -203,10 +203,10 @@ def add_filter(self, property_name, operator, value): >>> query.add_filter('name', '=', 'James') >>> query.add_filter('age', '>', 50) - :type property_name: string + :type property_name: str :param property_name: A property name. - :type operator: string + :type operator: str :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``. :type value: :class:`int`, :class:`str`, :class:`bool`, @@ -242,7 +242,7 @@ def projection(self): def projection(self, projection): """Set the fields returned the query. - :type projection: string or sequence of strings + :type projection: str or sequence of strings :param projection: Each value is a string giving the name of a property to be included in the projection query. """ @@ -260,7 +260,7 @@ def key_filter(self, key, operator='='): :type key: :class:`google.cloud.datastore.key.Key` :param key: The key to filter on. - :type operator: string + :type operator: str :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``. Defaults to ``=``. """ @@ -281,7 +281,7 @@ def order(self, value): Sort fields will be applied in the order specified. - :type value: string or sequence of strings + :type value: str or sequence of strings :param value: Each value is a string giving the name of the property on which to sort, optionally preceded by a hyphen (-) to specify descending order. @@ -304,7 +304,7 @@ def distinct_on(self): def distinct_on(self, value): """Set fields used to group query results. - :type value: string or sequence of strings + :type value: str or sequence of strings :param value: Each value is a string giving the name of a property to use to group results together. """ From 32b3d84ac9696e9ab8a051b8936482fe41b38c24 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:04:23 -0700 Subject: [PATCH 013/611] Replace rtypes boolean with bool. 
Uses the command: ag -l 'rtype: boolean' | xargs sed -i .bak 's/rtype: boolean/rtype: bool/g' --- .../google-cloud-datastore/google/cloud/datastore/entity.py | 4 ++-- .../google-cloud-datastore/google/cloud/datastore/helpers.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 6da069ccf432..c0756ce9f2f3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -91,7 +91,7 @@ def __eq__(self, other): Entities compare equal if their keys compare equal and their properties compare equal. - :rtype: boolean + :rtype: bool :returns: True if the entities compare equal, else False. """ if not isinstance(other, Entity): @@ -108,7 +108,7 @@ def __ne__(self, other): Entities compare equal if their keys compare equal and their properties compare equal. - :rtype: boolean + :rtype: bool :returns: False if the entities compare equal, else True. """ return not self.__eq__(other) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index ff105c5c2f9d..13723bdb0fa5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -452,7 +452,7 @@ def to_protobuf(self): def __eq__(self, other): """Compare two geo points for equality. - :rtype: boolean + :rtype: bool :returns: True if the points compare equal, else False. """ if not isinstance(other, GeoPoint): @@ -464,7 +464,7 @@ def __eq__(self, other): def __ne__(self, other): """Compare two geo points for inequality. - :rtype: boolean + :rtype: bool :returns: False if the points compare equal, else True. """ return not self.__eq__(other) From dadca4aab918553a581f1fd975340de81bf64605 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:20:20 -0700 Subject: [PATCH 014/611] Replace integer with int in types. Uses the command: ag -l 'type ([^:]+): integer' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): integer/type \1: int/g' --- .../google/cloud/datastore/query.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 954a320116a5..ecf62002ffb5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -327,10 +327,10 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, >>> list(query.fetch(1)) [] - :type limit: integer or None + :type limit: int or None :param limit: An optional limit passed through to the iterator. - :type offset: integer + :type offset: int :param offset: An optional offset passed through to the iterator. :type start_cursor: bytes @@ -366,10 +366,10 @@ class Iterator(object): :type client: :class:`google.cloud.datastore.client.Client` :param client: The client used to make a request. - :type limit: integer + :type limit: int :param limit: (Optional) Limit the number of results returned. - :type offset: integer + :type offset: int :param offset: (Optional) Offset used to begin a query. 
:type start_cursor: bytes From 0aeb01945e4bc7daf99240f04320d8a1e5444b04 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:24:00 -0700 Subject: [PATCH 015/611] Replaces integer with int in rtypes. Uses the command: ag -l 'rtype: integer' | xargs sed -i .bak 's/rtype: integer/rtype: int/g' --- .../google-cloud-datastore/google/cloud/datastore/key.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index be6ca3875f16..1ae6f1d2c102 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -105,7 +105,7 @@ def __ne__(self, other): def __hash__(self): """Hash a keys for use in a dictionary lookp. - :rtype: integer + :rtype: int :returns: a hash of the key's state. """ return (hash(self.flat_path) + @@ -307,7 +307,7 @@ def kind(self): def id(self): """ID getter. Based on the last element of path. - :rtype: integer + :rtype: int :returns: The (integer) ID of the key. """ return self.path[-1].get('id') @@ -325,7 +325,7 @@ def name(self): def id_or_name(self): """Getter. Based on the last element of path. - :rtype: integer (if ``id``) or string (if ``name``) + :rtype: int (if ``id``) or string (if ``name``) :returns: The last element of the key's path if it is either an ``id`` or a ``name``. """ From f1bc75585a11b66559985bbe50e75e32b11c4557 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Oct 2016 17:04:00 -0700 Subject: [PATCH 016/611] Replace :: with `.. code-block:: console`. Towards #2404. --- packages/google-cloud-datastore/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 56ad3f3c69ce..f625e16d3d2e 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -12,7 +12,7 @@ Python Client for Google Cloud Datastore Quick Start ----------- -:: +.. code-block:: console $ pip install --upgrade google-cloud-datastore From b412f816a3f22ddede22822ce658bb822b37ec29 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 09:51:22 -0700 Subject: [PATCH 017/611] Remove None from param types and add (Optional). This runs a script to remove None from the types for parameters, and added (Optional) to the description. Does not pass lint due to some too-long lines. I will clean those up manually. See: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2580#pullrequestreview-5178193 --- .../google/cloud/datastore/connection.py | 4 ++-- .../google/cloud/datastore/key.py | 4 ++-- .../google/cloud/datastore/query.py | 12 ++++++------ 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index 5ee84eadf874..74070b142355 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -569,8 +569,8 @@ def commit(self, project, request, transaction_id): :type request: :class:`._generated.datastore_pb2.CommitRequest` :param request: The protobuf with the mutations being committed. 
- :type transaction_id: str or None - :param transaction_id: The transaction ID returned from + :type transaction_id: str + :param transaction_id: (Optional) The transaction ID returned from :meth:`begin_transaction`. Non-transactional batches must pass ``None``. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 38fe7a9d8128..d814553434ca 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -388,8 +388,8 @@ def _validate_project(project, parent): :type project: str :param project: A project. - :type parent: :class:`google.cloud.datastore.key.Key` or ``NoneType`` - :param parent: The parent of the key or ``None``. + :type parent: :class:`google.cloud.datastore.key.Key` + :param parent: (Optional) The parent of the key or ``None``. :rtype: str :returns: The ``project`` passed in, or implied from the environment. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index ac69b4805844..4de07409f2f9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -38,12 +38,12 @@ class Query(object): :param project: The project associated with the query. If not passed, uses the client's value. - :type namespace: str or None - :param namespace: The namespace to which to restrict results. If not + :type namespace: str + :param namespace: (Optional) The namespace to which to restrict results. If not passed, uses the client's value. - :type ancestor: :class:`google.cloud.datastore.key.Key` or None - :param ancestor: key of the ancestor to which this query's results are + :type ancestor: :class:`google.cloud.datastore.key.Key` + :param ancestor: (Optional) key of the ancestor to which this query's results are restricted. :type filters: sequence of (property_name, operator, value) tuples @@ -327,8 +327,8 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, >>> list(query.fetch(1)) [] - :type limit: int or None - :param limit: An optional limit passed through to the iterator. + :type limit: int + :param limit: (Optional) An optional limit passed through to the iterator. :type offset: int :param offset: An optional offset passed through to the iterator. From e7ab357a16debef5ea0e3e300aa9d58aa07e6fe7 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 11:21:42 -0700 Subject: [PATCH 018/611] Fix lint errors caused by addition of (Optional). Mostly, lines that were too long. --- .../google/cloud/datastore/query.py | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 4de07409f2f9..8a7807496b18 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -35,16 +35,19 @@ class Query(object): :param kind: The kind to query. :type project: str - :param project: The project associated with the query. If not passed, - uses the client's value. + :param project: + (Optional) The project associated with the query. If not passed, uses + the client's value. :type namespace: str - :param namespace: (Optional) The namespace to which to restrict results. 
If not - passed, uses the client's value. + :param namespace: + (Optional) The namespace to which to restrict results. If not passed, + uses the client's value. :type ancestor: :class:`google.cloud.datastore.key.Key` - :param ancestor: (Optional) key of the ancestor to which this query's results are - restricted. + :param ancestor: + (Optional) key of the ancestor to which this query's results are + restricted. :type filters: sequence of (property_name, operator, value) tuples :param filters: property filters applied by this query. @@ -328,16 +331,16 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, [] :type limit: int - :param limit: (Optional) An optional limit passed through to the iterator. + :param limit: (Optional) limit passed through to the iterator. :type offset: int - :param offset: An optional offset passed through to the iterator. + :param offset: (Optional) offset passed through to the iterator. :type start_cursor: bytes - :param start_cursor: An optional cursor passed through to the iterator. + :param start_cursor: (Optional) cursor passed through to the iterator. :type end_cursor: bytes - :param end_cursor: An optional cursor passed through to the iterator. + :param end_cursor: (Optional) cursor passed through to the iterator. :type client: :class:`google.cloud.datastore.client.Client` :param client: client used to connect to datastore. From 7fc3ec9a0855d9a710705fde24c645e453c9e0cf Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Oct 2016 23:20:07 -0700 Subject: [PATCH 019/611] Remapping (almost) all RPC status codes to our exceptions in datastore. Fixes #2497. --- .../google/cloud/datastore/connection.py | 94 +++++++++--- .../unit_tests/test_connection.py | 141 +++++++++--------- 2 files changed, 137 insertions(+), 98 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index 74070b142355..2d038eff28c2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -23,19 +23,35 @@ from google.cloud import connection as connection_module from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_HOST -from google.cloud.exceptions import BadRequest -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import GrpcRendezvous -from google.cloud.exceptions import make_exception +from google.cloud import exceptions from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 try: from grpc import StatusCode from google.cloud.datastore._generated import datastore_grpc_pb2 except ImportError: # pragma: NO COVER + _GRPC_ERROR_MAPPING = {} _HAVE_GRPC = False datastore_grpc_pb2 = None StatusCode = None else: + # NOTE: We don't include OK -> 200 or CANCELLED -> 499 + _GRPC_ERROR_MAPPING = { + StatusCode.UNKNOWN: exceptions.InternalServerError, + StatusCode.INVALID_ARGUMENT: exceptions.BadRequest, + StatusCode.DEADLINE_EXCEEDED: exceptions.GatewayTimeout, + StatusCode.NOT_FOUND: exceptions.NotFound, + StatusCode.ALREADY_EXISTS: exceptions.Conflict, + StatusCode.PERMISSION_DENIED: exceptions.Forbidden, + StatusCode.UNAUTHENTICATED: exceptions.Unauthorized, + StatusCode.RESOURCE_EXHAUSTED: exceptions.TooManyRequests, + StatusCode.FAILED_PRECONDITION: exceptions.PreconditionFailed, + StatusCode.ABORTED: exceptions.Conflict, + StatusCode.OUT_OF_RANGE: 
exceptions.BadRequest, + StatusCode.UNIMPLEMENTED: exceptions.MethodNotImplemented, + StatusCode.INTERNAL: exceptions.InternalServerError, + StatusCode.UNAVAILABLE: exceptions.ServiceUnavailable, + StatusCode.DATA_LOSS: exceptions.InternalServerError, + } _HAVE_GRPC = True @@ -93,7 +109,8 @@ def _request(self, project, method, data): status = headers['status'] if status != '200': error_status = status_pb2.Status.FromString(content) - raise make_exception(headers, error_status.message, use_json=False) + raise exceptions.make_exception( + headers, error_status.message, use_json=False) return content @@ -220,6 +237,44 @@ def allocate_ids(self, project, request_pb): _datastore_pb2.AllocateIdsResponse) +def _grpc_catch_rendezvous(to_call, *args, **kwargs): + """Call a method/function and re-map gRPC exceptions. + + .. _code.proto: https://github.com/googleapis/googleapis/blob/\ + master/google/rpc/code.proto + + Remaps gRPC exceptions to the classes defined in + :mod:`~google.cloud.exceptions` (according to the description + in `code.proto`_). + + :type to_call: callable + :param to_call: Callable that makes a request which may raise a + :class:`~google.cloud.exceptions.GrpcRendezvous`. + + :type args: tuple + :param args: Positional arugments to the callable. + + :type kwargs: dict + :param kwargs: Keyword arguments to the callable. + + :rtype: object + :returns: The value returned from ``to_call``. + :raises: :class:`~google.cloud.exceptions.GrpcRendezvous` if one + is encountered that can't be re-mapped, otherwise maps + to a :class:`~google.cloud.exceptions.GoogleCloudError` + subclass. + """ + try: + return to_call(*args, **kwargs) + except exceptions.GrpcRendezvous as exc: + error_code = exc.code() + error_class = _GRPC_ERROR_MAPPING.get(error_code) + if error_class is None: + raise + else: + raise error_class(exc.details()) + + class _DatastoreAPIOverGRPC(object): """Helper mapping datastore API methods. @@ -276,13 +331,8 @@ def run_query(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - try: - return self._stub.RunQuery(request_pb) - except GrpcRendezvous as exc: - error_code = exc.code() - if error_code == StatusCode.INVALID_ARGUMENT: - raise BadRequest(exc.details()) - raise + return _grpc_catch_rendezvous( + self._stub.RunQuery, request_pb) def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request. @@ -299,7 +349,8 @@ def begin_transaction(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return self._stub.BeginTransaction(request_pb) + return _grpc_catch_rendezvous( + self._stub.BeginTransaction, request_pb) def commit(self, project, request_pb): """Perform a ``commit`` request. @@ -315,15 +366,8 @@ def commit(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - try: - return self._stub.Commit(request_pb) - except GrpcRendezvous as exc: - error_code = exc.code() - if error_code == StatusCode.ABORTED: - raise Conflict(exc.details()) - if error_code == StatusCode.INVALID_ARGUMENT: - raise BadRequest(exc.details()) - raise + return _grpc_catch_rendezvous( + self._stub.Commit, request_pb) def rollback(self, project, request_pb): """Perform a ``rollback`` request. @@ -339,7 +383,8 @@ def rollback(self, project, request_pb): :returns: The returned protobuf response object. 
""" request_pb.project_id = project - return self._stub.Rollback(request_pb) + return _grpc_catch_rendezvous( + self._stub.Rollback, request_pb) def allocate_ids(self, project, request_pb): """Perform an ``allocateIds`` request. @@ -355,7 +400,8 @@ def allocate_ids(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return self._stub.AllocateIds(request_pb) + return _grpc_catch_rendezvous( + self._stub.AllocateIds, request_pb) class Connection(connection_module.Connection): diff --git a/packages/google-cloud-datastore/unit_tests/test_connection.py b/packages/google-cloud-datastore/unit_tests/test_connection.py index cbafc72ac3f0..c7577cedb568 100644 --- a/packages/google-cloud-datastore/unit_tests/test_connection.py +++ b/packages/google-cloud-datastore/unit_tests/test_connection.py @@ -106,6 +106,67 @@ def test__request_not_200(self): [{'method': METHOD, 'project': PROJECT}]) +@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') +class Test__grpc_catch_rendezvous(unittest.TestCase): + + def _callFUT(self, to_call, *args, **kwargs): + from google.cloud.datastore.connection import _grpc_catch_rendezvous + return _grpc_catch_rendezvous(to_call, *args, **kwargs) + + @staticmethod + def _fake_method(exc, result=None): + if exc is None: + return result + else: + raise exc + + def test_success(self): + expected = object() + result = self._callFUT(self._fake_method, None, expected) + self.assertIs(result, expected) + + def test_failure_aborted(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import Conflict + from google.cloud.exceptions import GrpcRendezvous + + details = 'Bad things.' + exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) + exc = GrpcRendezvous(exc_state, None, None, None) + with self.assertRaises(Conflict): + self._callFUT(self._fake_method, exc) + + def test_failure_invalid_argument(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import BadRequest + from google.cloud.exceptions import GrpcRendezvous + + details = ('Cannot have inequality filters on multiple ' + 'properties: [created, priority]') + exc_state = _RPCState((), None, None, + StatusCode.INVALID_ARGUMENT, details) + exc = GrpcRendezvous(exc_state, None, None, None) + with self.assertRaises(BadRequest): + self._callFUT(self._fake_method, exc) + + def test_failure_cancelled(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import GrpcRendezvous + + exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) + exc = GrpcRendezvous(exc_state, None, None, None) + with self.assertRaises(GrpcRendezvous): + self._callFUT(self._fake_method, exc) + + def test_commit_failure_non_grpc_err(self): + exc = RuntimeError('Not a gRPC error') + with self.assertRaises(RuntimeError): + self._callFUT(self._fake_method, exc) + + class Test_DatastoreAPIOverGRPC(unittest.TestCase): def _getTargetClass(self): @@ -227,16 +288,6 @@ def test_run_query_invalid_argument(self): exc = GrpcRendezvous(exc_state, None, None, None) self._run_query_failure_helper(exc, BadRequest) - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') - def test_run_query_cancelled(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import GrpcRendezvous - - exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) - exc = GrpcRendezvous(exc_state, None, None, None) - 
self._run_query_failure_helper(exc, GrpcRendezvous) - def test_begin_transaction(self): return_val = object() stub = _GRPCStub(return_val) @@ -264,59 +315,6 @@ def test_commit_success(self): self.assertEqual(stub.method_calls, [(request_pb, 'Commit')]) - def _commit_failure_helper(self, exc, err_class): - stub = _GRPCStub(side_effect=exc) - datastore_api = self._makeOne(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - with self.assertRaises(err_class): - datastore_api.commit(project, request_pb) - - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Commit')]) - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') - def test_commit_failure_aborted(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import Conflict - from google.cloud.exceptions import GrpcRendezvous - - details = 'Bad things.' - exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) - exc = GrpcRendezvous(exc_state, None, None, None) - self._commit_failure_helper(exc, Conflict) - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') - def test_commit_failure_invalid_argument(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import BadRequest - from google.cloud.exceptions import GrpcRendezvous - - details = 'Too long content.' - exc_state = _RPCState((), None, None, - StatusCode.INVALID_ARGUMENT, details) - exc = GrpcRendezvous(exc_state, None, None, None) - self._commit_failure_helper(exc, BadRequest) - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') - def test_commit_failure_cancelled(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import GrpcRendezvous - - exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) - exc = GrpcRendezvous(exc_state, None, None, None) - self._commit_failure_helper(exc, GrpcRendezvous) - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') - def test_commit_failure_non_grpc_err(self): - exc = RuntimeError('Not a gRPC error') - self._commit_failure_helper(exc, RuntimeError) - def test_rollback(self): return_val = object() stub = _GRPCStub(return_val) @@ -1161,27 +1159,22 @@ def __init__(self, return_val=None, side_effect=Exception): def _method(self, request_pb, name): self.method_calls.append((request_pb, name)) - return self.return_val + if self.side_effect is Exception: + return self.return_val + else: + raise self.side_effect def Lookup(self, request_pb): return self._method(request_pb, 'Lookup') def RunQuery(self, request_pb): - result = self._method(request_pb, 'RunQuery') - if self.side_effect is Exception: - return result - else: - raise self.side_effect + return self._method(request_pb, 'RunQuery') def BeginTransaction(self, request_pb): return self._method(request_pb, 'BeginTransaction') def Commit(self, request_pb): - result = self._method(request_pb, 'Commit') - if self.side_effect is Exception: - return result - else: - raise self.side_effect + return self._method(request_pb, 'Commit') def Rollback(self, request_pb): return self._method(request_pb, 'Rollback') From 01525af8a0401a97aa723664c89bccb2497abf65 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 09:45:12 -0700 Subject: [PATCH 020/611] Switching _grpc_catch_rendezvous to a context manager. 
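The shape of the refactor, reduced to a self-contained sketch (FakeRendezvous
and BadRequest below are stand-ins for GrpcRendezvous and the google.cloud
exception classes; the real code remaps via _GRPC_ERROR_MAPPING):

    import contextlib

    class FakeRendezvous(Exception):
        # Stand-in for GrpcRendezvous in this sketch.
        def __init__(self, code, details):
            super(FakeRendezvous, self).__init__(details)
            self._code = code
            self._details = details

        def code(self):
            return self._code

        def details(self):
            return self._details

    class BadRequest(Exception):
        # Stand-in for google.cloud.exceptions.BadRequest.
        pass

    _ERROR_MAPPING = {'INVALID_ARGUMENT': BadRequest}

    @contextlib.contextmanager
    def _catch_rendezvous():
        try:
            yield  # the stub call executes inside the with-block
        except FakeRendezvous as exc:
            error_class = _ERROR_MAPPING.get(exc.code())
            if error_class is None:
                raise  # unmapped codes propagate unchanged
            raise error_class(exc.details())

    # An error raised inside the block comes out remapped:
    try:
        with _catch_rendezvous():
            raise FakeRendezvous('INVALID_ARGUMENT', 'bad filter')
    except BadRequest as caught:
        print('remapped: %s' % (caught,))

Call sites shrink accordingly: instead of threading the stub method and its
arguments through a wrapper, each API method now reads
'with _grpc_catch_rendezvous(): return self._stub.Commit(request_pb)'.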
--- .../google/cloud/datastore/connection.py | 45 +++++++------------ .../unit_tests/test_connection.py | 19 +++++--- 2 files changed, 27 insertions(+), 37 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index 2d038eff28c2..aac5c85e0a88 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -14,6 +14,7 @@ """Connections to Google Cloud Datastore API servers.""" +import contextlib import os from google.rpc import status_pb2 @@ -237,8 +238,9 @@ def allocate_ids(self, project, request_pb): _datastore_pb2.AllocateIdsResponse) -def _grpc_catch_rendezvous(to_call, *args, **kwargs): - """Call a method/function and re-map gRPC exceptions. +@contextlib.contextmanager +def _grpc_catch_rendezvous(): + """Re-map gRPC exceptions that happen in context. .. _code.proto: https://github.com/googleapis/googleapis/blob/\ master/google/rpc/code.proto @@ -246,26 +248,9 @@ def _grpc_catch_rendezvous(to_call, *args, **kwargs): Remaps gRPC exceptions to the classes defined in :mod:`~google.cloud.exceptions` (according to the description in `code.proto`_). - - :type to_call: callable - :param to_call: Callable that makes a request which may raise a - :class:`~google.cloud.exceptions.GrpcRendezvous`. - - :type args: tuple - :param args: Positional arugments to the callable. - - :type kwargs: dict - :param kwargs: Keyword arguments to the callable. - - :rtype: object - :returns: The value returned from ``to_call``. - :raises: :class:`~google.cloud.exceptions.GrpcRendezvous` if one - is encountered that can't be re-mapped, otherwise maps - to a :class:`~google.cloud.exceptions.GoogleCloudError` - subclass. """ try: - return to_call(*args, **kwargs) + yield except exceptions.GrpcRendezvous as exc: error_code = exc.code() error_class = _GRPC_ERROR_MAPPING.get(error_code) @@ -331,8 +316,8 @@ def run_query(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return _grpc_catch_rendezvous( - self._stub.RunQuery, request_pb) + with _grpc_catch_rendezvous(): + return self._stub.RunQuery(request_pb) def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request. @@ -349,8 +334,8 @@ def begin_transaction(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return _grpc_catch_rendezvous( - self._stub.BeginTransaction, request_pb) + with _grpc_catch_rendezvous(): + return self._stub.BeginTransaction(request_pb) def commit(self, project, request_pb): """Perform a ``commit`` request. @@ -366,8 +351,8 @@ def commit(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return _grpc_catch_rendezvous( - self._stub.Commit, request_pb) + with _grpc_catch_rendezvous(): + return self._stub.Commit(request_pb) def rollback(self, project, request_pb): """Perform a ``rollback`` request. @@ -383,8 +368,8 @@ def rollback(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return _grpc_catch_rendezvous( - self._stub.Rollback, request_pb) + with _grpc_catch_rendezvous(): + return self._stub.Rollback(request_pb) def allocate_ids(self, project, request_pb): """Perform an ``allocateIds`` request. 
@@ -400,8 +385,8 @@ def allocate_ids(self, project, request_pb): :returns: The returned protobuf response object. """ request_pb.project_id = project - return _grpc_catch_rendezvous( - self._stub.AllocateIds, request_pb) + with _grpc_catch_rendezvous(): + return self._stub.AllocateIds(request_pb) class Connection(connection_module.Connection): diff --git a/packages/google-cloud-datastore/unit_tests/test_connection.py b/packages/google-cloud-datastore/unit_tests/test_connection.py index c7577cedb568..973a3241506e 100644 --- a/packages/google-cloud-datastore/unit_tests/test_connection.py +++ b/packages/google-cloud-datastore/unit_tests/test_connection.py @@ -109,9 +109,9 @@ def test__request_not_200(self): @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') class Test__grpc_catch_rendezvous(unittest.TestCase): - def _callFUT(self, to_call, *args, **kwargs): + def _callFUT(self): from google.cloud.datastore.connection import _grpc_catch_rendezvous - return _grpc_catch_rendezvous(to_call, *args, **kwargs) + return _grpc_catch_rendezvous() @staticmethod def _fake_method(exc, result=None): @@ -122,7 +122,8 @@ def _fake_method(exc, result=None): def test_success(self): expected = object() - result = self._callFUT(self._fake_method, None, expected) + with self._callFUT(): + result = self._fake_method(None, expected) self.assertIs(result, expected) def test_failure_aborted(self): @@ -135,7 +136,8 @@ def test_failure_aborted(self): exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) exc = GrpcRendezvous(exc_state, None, None, None) with self.assertRaises(Conflict): - self._callFUT(self._fake_method, exc) + with self._callFUT(): + self._fake_method(exc) def test_failure_invalid_argument(self): from grpc import StatusCode @@ -149,7 +151,8 @@ def test_failure_invalid_argument(self): StatusCode.INVALID_ARGUMENT, details) exc = GrpcRendezvous(exc_state, None, None, None) with self.assertRaises(BadRequest): - self._callFUT(self._fake_method, exc) + with self._callFUT(): + self._fake_method(exc) def test_failure_cancelled(self): from grpc import StatusCode @@ -159,12 +162,14 @@ def test_failure_cancelled(self): exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) exc = GrpcRendezvous(exc_state, None, None, None) with self.assertRaises(GrpcRendezvous): - self._callFUT(self._fake_method, exc) + with self._callFUT(): + self._fake_method(exc) def test_commit_failure_non_grpc_err(self): exc = RuntimeError('Not a gRPC error') with self.assertRaises(RuntimeError): - self._callFUT(self._fake_method, exc) + with self._callFUT(): + self._fake_method(exc) class Test_DatastoreAPIOverGRPC(unittest.TestCase): From ca13a6422ecd8cdad4fb3c2a10aeba432645d1dd Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 21:10:46 -0700 Subject: [PATCH 021/611] Adding _grpc_catch_rendezvous to _DatastoreAPIOverGRPC.lookup. Missed in #2590. --- .../google/cloud/datastore/connection.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index aac5c85e0a88..361846d99d46 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -300,7 +300,8 @@ def lookup(self, project, request_pb): :returns: The returned protobuf response object. 
""" request_pb.project_id = project - return self._stub.Lookup(request_pb) + with _grpc_catch_rendezvous(): + return self._stub.Lookup(request_pb) def run_query(self, project, request_pb): """Perform a ``runQuery`` request. From 3a2510ddc7ad358e6b5d660d91532016ea2666b5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 17:01:13 -0700 Subject: [PATCH 022/611] Removing explicit doctest blocks from Sphinx docs. This way we can gradually turn them on with **doctest** and make sure they work piece by piece. Also converted some implicit code blocks (`::`) and some implicit doctest blocks (`:` followed by `>>>`) into explicit code blocks. --- .../google/cloud/datastore/client.py | 36 +++++++++++-------- .../google/cloud/datastore/connection.py | 18 ++++++---- .../google/cloud/datastore/entity.py | 21 +++++++---- .../google/cloud/datastore/key.py | 6 ++++ .../google/cloud/datastore/transaction.py | 31 +++++++++++----- 5 files changed, 74 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index c8e95e6013a5..b97d45437682 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -447,29 +447,35 @@ def query(self, **kwargs): Passes our ``project``. - Using query to search a datastore:: + Using query to search a datastore: - >>> from google.cloud import datastore - >>> client = datastore.Client() - >>> query = client.query(kind='MyKind') - >>> query.add_filter('property', '=', 'val') + .. code-block:: python + + >>> from google.cloud import datastore + >>> client = datastore.Client() + >>> query = client.query(kind='MyKind') + >>> query.add_filter('property', '=', 'val') Using the query iterator's :meth:`~google.cloud.datastore.query.Iterator.next_page` method: - >>> query_iter = query.fetch() - >>> entities, more_results, cursor = query_iter.next_page() - >>> entities - [] - >>> more_results - - >>> cursor - + .. code-block:: python + + >>> query_iter = query.fetch() + >>> entities, more_results, cursor = query_iter.next_page() + >>> entities + [] + >>> more_results + + >>> cursor + Under the hood this is doing: - >>> connection.run_query('project', query.to_protobuf()) - [], cursor, more_results, skipped_results + .. code-block:: python + + >>> connection.run_query('project', query.to_protobuf()) + [], cursor, more_results, skipped_results :type kwargs: dict :param kwargs: Parameters for initializing and instance of diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/connection.py index 361846d99d46..67a542d85405 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/connection.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/connection.py @@ -474,16 +474,20 @@ def lookup(self, project, key_pbs, as output). It is used under the hood in :meth:`Client.get() <.datastore.client.Client.get>`: - >>> from google.cloud import datastore - >>> client = datastore.Client(project='project') - >>> key = client.key('MyKind', 1234) - >>> client.get(key) - [] + .. code-block:: python + + >>> from google.cloud import datastore + >>> client = datastore.Client(project='project') + >>> key = client.key('MyKind', 1234) + >>> client.get(key) + [] Using a :class:`Connection` directly: - >>> connection.lookup('project', [key.to_protobuf()]) - [] + .. 
code-block:: python + + >>> connection.lookup('project', [key.to_protobuf()]) + [] :type project: str :param project: The project to look up the keys in. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index c0756ce9f2f3..02ef0fd3ac73 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -37,7 +37,10 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - Use :func:`google.cloud.datastore.get` to retrieve an existing entity. + Use :meth:`~google.cloud.datastore.client.Client.get` to retrieve an + existing entity: + + .. code-block:: python >>> from google.cloud import datastore >>> client = datastore.Client() @@ -47,16 +50,20 @@ class Entity(dict): You can the set values on the entity just like you would on any other dictionary. - >>> entity['age'] = 20 - >>> entity['name'] = 'JJ' - >>> entity - + .. code-block:: python + + >>> entity['age'] = 20 + >>> entity['name'] = 'JJ' + >>> entity + And you can convert an entity to a regular Python dictionary with the ``dict`` builtin: - >>> dict(entity) - {'age': 20, 'name': 'JJ'} + .. code-block:: python + + >>> dict(entity) + {'age': 20, 'name': 'JJ'} .. note:: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index d814553434ca..72a50340ec84 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -25,6 +25,8 @@ class Key(object): To create a basic key: + .. code-block:: python + >>> Key('EntityKind', 1234) >>> Key('EntityKind', 'foo') @@ -32,6 +34,8 @@ class Key(object): To create a key with a parent: + .. code-block:: python + >>> Key('Parent', 'foo', 'Child', 1234) >>> Key('Child', 1234, parent=parent_key) @@ -39,6 +43,8 @@ class Key(object): To create a partial key: + .. code-block:: python + >>> Key('Parent', 'foo', 'Child') diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index a053518597ef..ef61d64a61d6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -25,22 +25,27 @@ class Transaction(Batch): For example, the following snippet of code will put the two ``save`` operations (either ``insert`` or ``upsert``) into the same - mutation, and execute those within a transaction:: + mutation, and execute those within a transaction: + + .. code-block:: python - >>> from google.cloud import datastore >>> client = datastore.Client() >>> with client.transaction(): ... client.put_multi([entity1, entity2]) - Because it derives from :class:`Batch <.datastore.batch.Batch>`, - :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods:: + Because it derives from :class:`~google.cloud.datastore.batch.Batch`, + :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods: + + .. code-block:: python >>> with client.transaction() as xact: ... xact.put(entity1) ... xact.delete(entity2.key) By default, the transaction is rolled back if the transaction block - exits with an error:: + exits with an error: + + .. code-block:: python >>> with client.transaction(): ... 
do_some_work() @@ -49,9 +54,13 @@ class Transaction(Batch): If the transaction block exists without an exception, it will commit by default. - .. warning:: Inside a transaction, automatically assigned IDs for + .. warning:: + + Inside a transaction, automatically assigned IDs for entities will not be available at save time! That means, if you - try:: + try: + + .. code-block:: python >>> with client.transaction(): ... entity = datastore.Entity(key=client.key('Thing')) @@ -61,7 +70,9 @@ class Transaction(Batch): committed. Once you exit the transaction (or call :meth:`commit`), the - automatically generated ID will be assigned to the entity:: + automatically generated ID will be assigned to the entity: + + .. code-block:: python >>> with client.transaction(): ... entity = datastore.Entity(key=client.key('Thing')) @@ -73,7 +84,9 @@ class Transaction(Batch): False If you don't want to use the context manager you can initialize a - transaction manually:: + transaction manually: + + .. code-block:: python >>> transaction = client.transaction() >>> transaction.begin() From 2a68d5a035f4152b023ea3e3e33b4cd9b62e086a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 16:48:37 -0700 Subject: [PATCH 023/611] Add datastore.__init__ to docs. This in turn required a fix in verify_included_modules to turn `__init__.py` files into the correct module / package name. --- .../google/cloud/datastore/__init__.py | 28 ++++++++++--------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 452597ffe7a1..9a97e81759e2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -14,38 +14,40 @@ """Shortcut methods for getting set up with Google Cloud Datastore. -You'll typically use these to get started with the API:: +You'll typically use these to get started with the API: - >>> from google.cloud import datastore - >>> - >>> client = datastore.Client() - >>> key = client.key('EntityKind', 1234) - >>> entity = datastore.Entity(key) - >>> query = client.query(kind='EntityKind') +.. code-block:: python + + from google.cloud import datastore + + client = datastore.Client() + key = client.key('EntityKind', 1234) + entity = datastore.Entity(key) + query = client.query(kind='EntityKind') The main concepts with this API are: -- :class:`google.cloud.datastore.connection.Connection` +- :class:`~google.cloud.datastore.connection.Connection` which represents a connection between your machine and the Cloud Datastore API. -- :class:`google.cloud.datastore.client.Client` +- :class:`~google.cloud.datastore.client.Client` which represents a project (string) and namespace (string) bundled with a connection and has convenience methods for constructing objects with that project / namespace. -- :class:`google.cloud.datastore.entity.Entity` +- :class:`~google.cloud.datastore.entity.Entity` which represents a single entity in the datastore (akin to a row in relational database world). -- :class:`google.cloud.datastore.key.Key` +- :class:`~google.cloud.datastore.key.Key` which represents a pointer to a particular entity in the datastore (akin to a unique identifier in relational database world). -- :class:`google.cloud.datastore.query.Query` +- :class:`~google.cloud.datastore.query.Query` which represents a lookup or search over the rows in the datastore. 
-- :class:`google.cloud.datastore.transaction.Transaction` +- :class:`~google.cloud.datastore.transaction.Transaction` which represents an all-or-none transaction and enables consistency when race conditions may occur. """ From d140cc636454048643d093449bdc24a14dd0ff51 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 10:12:12 -0700 Subject: [PATCH 024/611] Adding PyPI badges to package READMEs. --- packages/google-cloud-datastore/README.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index f625e16d3d2e..d913abc7821f 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -5,6 +5,8 @@ Python Client for Google Cloud Datastore .. _Google Cloud Datastore: https://cloud.google.com/datastore/docs +|pypi| |versions| + - `Documentation`_ .. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html @@ -63,3 +65,8 @@ how to activate Cloud Datastore for your project. query = datastore.Query(kind='EntityKind') for result in query.fetch(): print(result) + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg + :target: https://pypi.python.org/pypi/google-cloud-datastore +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg + :target: https://pypi.python.org/pypi/google-cloud-datastore From ba1ab4d2b3d73ff403cb7dc77a20d39a76db18fd Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 12:49:02 -0700 Subject: [PATCH 025/611] Renaming connection module as _http in 5 packages. The packages are BigQuery, Datastore, Logging, Pub/Sub and Storage. The rename is in advance of a larger re-factor. But so long as the connections are not public, the re-factor can happen without user-facing implications. --- .../google/cloud/datastore/{connection.py => _http.py} | 0 .../unit_tests/{test_connection.py => test__http.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename packages/google-cloud-datastore/google/cloud/datastore/{connection.py => _http.py} (100%) rename packages/google-cloud-datastore/unit_tests/{test_connection.py => test__http.py} (100%) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/connection.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py similarity index 100% rename from packages/google-cloud-datastore/google/cloud/datastore/connection.py rename to packages/google-cloud-datastore/google/cloud/datastore/_http.py diff --git a/packages/google-cloud-datastore/unit_tests/test_connection.py b/packages/google-cloud-datastore/unit_tests/test__http.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_connection.py rename to packages/google-cloud-datastore/unit_tests/test__http.py From 16a1f483ddb6c31f39db12a3bea2b641d9bfd70f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 13:22:29 -0700 Subject: [PATCH 026/611] Updating imports to reflect connection->_http module rename. 
--- .../google/cloud/datastore/__init__.py | 4 --- .../google/cloud/datastore/client.py | 2 +- .../unit_tests/test__http.py | 28 +++++++++---------- 3 files changed, 15 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 9a97e81759e2..a178555786ba 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -54,12 +54,8 @@ from google.cloud.datastore.batch import Batch -from google.cloud.datastore.connection import Connection from google.cloud.datastore.client import Client from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query from google.cloud.datastore.transaction import Transaction - - -SCOPE = Connection.SCOPE diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index b97d45437682..807a25774a4c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -20,8 +20,8 @@ _determine_default_project as _base_default_project) from google.cloud.client import _ClientProjectMixin from google.cloud.client import Client as _BaseClient +from google.cloud.datastore._http import Connection from google.cloud.datastore import helpers -from google.cloud.datastore.connection import Connection from google.cloud.datastore.batch import Batch from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 973a3241506e..c7f35c3b4772 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -14,13 +14,13 @@ import unittest -from google.cloud.datastore.connection import _HAVE_GRPC +from google.cloud.datastore._http import _HAVE_GRPC class Test_DatastoreAPIOverHttp(unittest.TestCase): def _getTargetClass(self): - from google.cloud.datastore.connection import _DatastoreAPIOverHttp + from google.cloud.datastore._http import _DatastoreAPIOverHttp return _DatastoreAPIOverHttp def _makeOne(self, *args, **kw): @@ -110,7 +110,7 @@ def test__request_not_200(self): class Test__grpc_catch_rendezvous(unittest.TestCase): def _callFUT(self): - from google.cloud.datastore.connection import _grpc_catch_rendezvous + from google.cloud.datastore._http import _grpc_catch_rendezvous return _grpc_catch_rendezvous() @staticmethod @@ -175,12 +175,12 @@ def test_commit_failure_non_grpc_err(self): class Test_DatastoreAPIOverGRPC(unittest.TestCase): def _getTargetClass(self): - from google.cloud.datastore.connection import _DatastoreAPIOverGRPC + from google.cloud.datastore._http import _DatastoreAPIOverGRPC return _DatastoreAPIOverGRPC def _makeOne(self, stub, connection=None, secure=True, mock_args=None): from google.cloud._testing import _Monkey - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT if connection is None: connection = _Connection(None) @@ -202,7 +202,7 @@ def mock_make_stub(*args): return self._getTargetClass()(connection, secure) def test_constructor(self): - from google.cloud.datastore import connection as MUT + from google.cloud.datastore 
import _http as MUT conn = _Connection(None) conn.credentials = object() @@ -222,7 +222,7 @@ def test_constructor(self): )]) def test_constructor_insecure(self): - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT conn = _Connection(None) conn.credentials = object() @@ -351,7 +351,7 @@ def test_allocate_ids(self): class TestConnection(unittest.TestCase): def _getTargetClass(self): - from google.cloud.datastore.connection import Connection + from google.cloud.datastore._http import Connection return Connection @@ -370,7 +370,7 @@ def _make_query_pb(self, kind): def _makeOne(self, credentials=None, http=None, use_grpc=False): from google.cloud._testing import _Monkey - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT with _Monkey(MUT, _USE_GRPC=use_grpc): return self._getTargetClass()(credentials=credentials, http=http) @@ -408,7 +408,7 @@ def test_ctor_defaults(self): def test_ctor_without_grpc(self): from google.cloud._testing import _Monkey - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT connections = [] return_val = object() @@ -426,7 +426,7 @@ def mock_api(connection): def test_ctor_with_grpc(self): from google.cloud._testing import _Monkey - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT api_args = [] return_val = object() @@ -921,7 +921,7 @@ def test_begin_transaction(self): def test_commit_wo_transaction(self): from google.cloud._testing import _Monkey from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -967,7 +967,7 @@ def mock_parse(response): def test_commit_w_transaction(self): from google.cloud._testing import _Monkey from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore import connection as MUT + from google.cloud.datastore import _http as MUT from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -1091,7 +1091,7 @@ def test_allocate_ids_non_empty(self): class Test__parse_commit_response(unittest.TestCase): def _callFUT(self, commit_response_pb): - from google.cloud.datastore.connection import _parse_commit_response + from google.cloud.datastore._http import _parse_commit_response return _parse_commit_response(commit_response_pb) def test_it(self): From 9d800179faf295af94e62f84a4788059bd6ed236 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 14:30:24 -0700 Subject: [PATCH 027/611] Removing docs references to connection module. 
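With the connection now living in a private module, user-facing docstrings
should no longer advertise it. Where a cross-reference is still useful to
maintainers, it points at the private path instead; the batch property ends up
reading as below (excerpted from the hunk that follows):

    def connection(self):
        """Getter for connection over which the batch will run.

        :rtype: :class:`google.cloud.datastore._http.Connection`
        :returns: The connection over which the batch will run.
        """
        return self._client.connection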
--- .../google-cloud-datastore/google/cloud/datastore/__init__.py | 4 ---- .../google-cloud-datastore/google/cloud/datastore/_http.py | 4 ++-- .../google-cloud-datastore/google/cloud/datastore/batch.py | 2 +- .../google-cloud-datastore/google/cloud/datastore/client.py | 2 +- 4 files changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index a178555786ba..7ca8e3f116f0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -27,10 +27,6 @@ The main concepts with this API are: -- :class:`~google.cloud.datastore.connection.Connection` - which represents a connection between your machine and the Cloud Datastore - API. - - :class:`~google.cloud.datastore.client.Client` which represents a project (string) and namespace (string) bundled with a connection and has convenience methods for constructing objects with that diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 67a542d85405..4636b2651b7a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -71,7 +71,7 @@ class _DatastoreAPIOverHttp(object): Methods make bare API requests without any helpers for constructing the requests or parsing the responses. - :type connection: :class:`google.cloud.datastore.connection.Connection` + :type connection: :class:`Connection` :param connection: A connection object that contains helpful information for making requests. """ @@ -268,7 +268,7 @@ class _DatastoreAPIOverGRPC(object): Methods make bare API requests without any helpers for constructing the requests or parsing the responses. - :type connection: :class:`google.cloud.datastore.connection.Connection` + :type connection: :class:`Connection` :param connection: A connection object that contains helpful information for making requests. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index f27d67255525..e944b56b7a6e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -107,7 +107,7 @@ def namespace(self): def connection(self): """Getter for connection over which the batch will run. - :rtype: :class:`google.cloud.datastore.connection.Connection` + :rtype: :class:`google.cloud.datastore._http.Connection` :returns: The connection over which the batch will run. """ return self._client.connection diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 807a25774a4c..4b4a53f70d83 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -72,7 +72,7 @@ def _extended_lookup(connection, project, key_pbs, Helper function for :meth:`Client.get_multi`. - :type connection: :class:`google.cloud.datastore.connection.Connection` + :type connection: :class:`google.cloud.datastore._http.Connection` :param connection: The connection used to connect to datastore. 
:type project: str From 62867e88b54e3ad10c257c65114191a15504e76c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 21:26:07 -0800 Subject: [PATCH 028/611] Avoiding using filesystem deps in package tox.ini configs. --- packages/google-cloud-datastore/tox.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini index 64f0d45463dd..e40bcca54c94 100644 --- a/packages/google-cloud-datastore/tox.ini +++ b/packages/google-cloud-datastore/tox.ini @@ -3,8 +3,9 @@ envlist = py27,py34,py35,cover [testing] +localdeps = + pip install --upgrade {toxinidir}/../core deps = - {toxinidir}/../core pytest covercmd = py.test --quiet \ @@ -15,6 +16,7 @@ covercmd = [testenv] commands = + {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -23,6 +25,7 @@ deps = basepython = python2.7 commands = + {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From 30d75b9bd46d3b748eb2eb23859aef6c29ca7e83 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 21:49:28 -0800 Subject: [PATCH 029/611] Factoring protobuf construction out of datastore iterator. --- .../google/cloud/datastore/query.py | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 8a7807496b18..75be752dee3a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -403,14 +403,14 @@ def __init__(self, query, client, limit=None, offset=None, self._page = self._more_results = None self._skipped_results = None - def next_page(self): - """Fetch a single "page" of query results. + def _build_protobuf(self): + """Build a query protobuf. - Low-level API for fine control: the more convenient API is - to iterate on the current Iterator. + Relies on the current state of the iterator. - :rtype: tuple, (entities, more_results, cursor) - :returns: The next page of results. + :rtype: `google.cloud.datastore._generated.query_pb2.Query` + :returns: The query protobuf object for the current + state of the iterator. """ pb = _pb_from_query(self._query) @@ -428,6 +428,18 @@ def next_page(self): if self._offset is not None: pb.offset = self._offset + return pb + + def next_page(self): + """Fetch a single "page" of query results. + + Low-level API for fine control: the more convenient API is + to iterate on the current Iterator. + + :rtype: tuple, (entities, more_results, cursor) + :returns: The next page of results. + """ + pb = self._build_protobuf() transaction = self._client.current_transaction query_results = self._client.connection.run_query( From 5fb442cdb2a52165c8735a9be8f4d5b5611fca15 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 22:07:24 -0800 Subject: [PATCH 030/611] Factoring query processing out of datastore iterator. 
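Factoring the response handling out of next_page() means a unit test can feed
the helper a hand-built response tuple instead of stubbing an entire
connection. A rough sketch, assuming `query` and `client` doubles like the
ones already used in test_query.py:

    from google.cloud.datastore._generated import query_pb2
    from google.cloud.datastore.query import Iterator

    iterator = Iterator(query, client)
    page = iterator._process_query_results(
        [], b'', query_pb2.QueryResultBatch.NO_MORE_RESULTS, 0)
    assert page == []                        # no entities to convert
    assert iterator._more_results is False   # enum is in the finished group
    assert iterator._start_cursor is None    # empty cursor clears paging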
--- .../google/cloud/datastore/query.py | 57 +++++++++++++------ 1 file changed, 40 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 75be752dee3a..14e0d8803236 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -430,26 +430,28 @@ def _build_protobuf(self): return pb - def next_page(self): - """Fetch a single "page" of query results. + def _process_query_results(self, entity_pbs, cursor_as_bytes, + more_results_enum, skipped_results): + """Process the response from a datastore query. - Low-level API for fine control: the more convenient API is - to iterate on the current Iterator. + :type entity_pbs: iterable + :param entity_pbs: The entities returned in the current page. - :rtype: tuple, (entities, more_results, cursor) + :type cursor_as_bytes: bytes + :param cursor_as_bytes: The end cursor of the query. + + :type more_results_enum: + :class:`._generated.query_pb2.QueryResultBatch.MoreResultsType` + :param more_results_enum: Enum indicating if there are more results. + + :type skipped_results: int + :param skipped_results: The number of skipped results. + + :rtype: list :returns: The next page of results. + :raises ValueError: If ``more_results`` is an unexpected value. """ - pb = self._build_protobuf() - transaction = self._client.current_transaction - - query_results = self._client.connection.run_query( - query_pb=pb, - project=self._query.project, - namespace=self._query.namespace, - transaction_id=transaction and transaction.id, - ) - (entity_pbs, cursor_as_bytes, - more_results_enum, self._skipped_results) = query_results + self._skipped_results = skipped_results if cursor_as_bytes == b'': self._start_cursor = None @@ -464,9 +466,30 @@ def next_page(self): else: raise ValueError('Unexpected value returned for `more_results`.') - self._page = [ + page = [ helpers.entity_from_protobuf(entity) for entity in entity_pbs] + return page + + def next_page(self): + """Fetch a single "page" of query results. + + Low-level API for fine control: the more convenient API is + to iterate on the current Iterator. + + :rtype: tuple, (entities, more_results, cursor) + :returns: The next page of results. + """ + pb = self._build_protobuf() + transaction = self._client.current_transaction + + query_results = self._client.connection.run_query( + query_pb=pb, + project=self._query.project, + namespace=self._query.namespace, + transaction_id=transaction and transaction.id, + ) + self._page = self._process_query_results(*query_results) return self._page, self._more_results, self._start_cursor def __iter__(self): From 8ca36eb6a920373a32fdac9a97f2b7ada8136a5b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 22:30:24 -0800 Subject: [PATCH 031/611] Moving datastore iterator enum helpers into module scope. 
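Hoisting _NOT_FINISHED / _FINISHED to module scope lets more than one iterator
implementation share the same classification of the MoreResultsType enum. The
helper below is a hypothetical illustration of that shared use (it does not
exist in the module); the constants are the ones introduced in the diff:

    def _has_more(more_results_enum):
        if more_results_enum == _NOT_FINISHED:
            return True
        if more_results_enum in _FINISHED:
            return False
        raise ValueError('Unexpected value returned for `more_results`.')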
--- .../google/cloud/datastore/query.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 14e0d8803236..2e6f5f2394a5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -22,6 +22,15 @@ from google.cloud.datastore.key import Key +_NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED + +_FINISHED = ( + _query_pb2.QueryResultBatch.NO_MORE_RESULTS, + _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT, + _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR, +) + + class Query(object): """A Query against the Cloud Datastore. @@ -384,14 +393,6 @@ class Iterator(object): query results. """ - _NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED - - _FINISHED = ( - _query_pb2.QueryResultBatch.NO_MORE_RESULTS, - _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT, - _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR, - ) - def __init__(self, query, client, limit=None, offset=None, start_cursor=None, end_cursor=None): self._query = query @@ -459,9 +460,9 @@ def _process_query_results(self, entity_pbs, cursor_as_bytes, self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes) self._end_cursor = None - if more_results_enum == self._NOT_FINISHED: + if more_results_enum == _NOT_FINISHED: self._more_results = True - elif more_results_enum in self._FINISHED: + elif more_results_enum in _FINISHED: self._more_results = False else: raise ValueError('Unexpected value returned for `more_results`.') From c09f1acc7a043d6f911913abeead112a9af1922d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 22:39:05 -0800 Subject: [PATCH 032/611] Adding alternate datastore iterator class that inherits from base Iterator. --- .../google/cloud/datastore/query.py | 40 +++++++++++++++ .../unit_tests/test_query.py | 50 +++++++++++++++++++ 2 files changed, 90 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 2e6f5f2394a5..3edc06a18f5e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -17,6 +17,8 @@ import base64 from google.cloud._helpers import _ensure_tuple_or_list +from google.cloud.iterator import Iterator as BaseIterator + from google.cloud.datastore._generated import query_pb2 as _query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key @@ -367,6 +369,44 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, self, client, limit, offset, start_cursor, end_cursor) +class AltIterator(BaseIterator): + """Represent the state of a given execution of a Query. + + :type query: :class:`~google.cloud.datastore.query.Query` + :param query: Query object holding permanent configuration (i.e. + things that don't change on with each page in + a results set). + + :type client: :class:`~google.cloud.datastore.client.Client` + :param client: The client used to make a request. + + :type limit: int + :param limit: (Optional) Limit the number of results returned. + + :type offset: int + :param offset: (Optional) Offset used to begin a query. + + :type start_cursor: bytes + :param start_cursor: (Optional) Cursor to begin paging through + query results. 
+ + :type end_cursor: bytes + :param end_cursor: (Optional) Cursor to end paging through + query results. + """ + + def __init__(self, query, client, limit=None, offset=None, + start_cursor=None, end_cursor=None): + super(AltIterator, self).__init__( + client=client, item_to_value=None, + page_token=start_cursor, max_results=limit) + self._query = query + self._offset = offset + self._end_cursor = end_cursor + # The attributes below will change over the life of the iterator. + self._more_results = True + + class Iterator(object): """Represent the state of a given execution of a Query. diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 0fa333a83e7c..468a392e4d86 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -611,6 +611,56 @@ def test___iter___w_limit(self): self.assertEqual(connection._called_with[2], EXPECTED3) +class TestAltIterator(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.datastore.query import AltIterator + return AltIterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_constructor_defaults(self): + query = object() + client = object() + iterator = self._makeOne(query, client) + + self.assertFalse(iterator._started) + self.assertIs(iterator.client, client) + self.assertIsNone(iterator._item_to_value) + self.assertIsNone(iterator.max_results) + self.assertEqual(iterator.page_number, 0) + self.assertIsNone(iterator.next_page_token,) + self.assertEqual(iterator.num_results, 0) + self.assertIs(iterator._query, query) + self.assertIsNone(iterator._offset) + self.assertIsNone(iterator._end_cursor) + self.assertTrue(iterator._more_results) + + def test_constructor_explicit(self): + query = object() + client = object() + limit = 43 + offset = 9 + start_cursor = b'8290\xff' + end_cursor = b'so20rc\ta' + iterator = self._makeOne( + query, client, limit=limit, offset=offset, + start_cursor=start_cursor, end_cursor=end_cursor) + + self.assertFalse(iterator._started) + self.assertIs(iterator.client, client) + self.assertIsNone(iterator._item_to_value) + self.assertEqual(iterator.max_results, limit) + self.assertEqual(iterator.page_number, 0) + self.assertEqual(iterator.next_page_token, start_cursor) + self.assertEqual(iterator.num_results, 0) + self.assertIs(iterator._query, query) + self.assertEqual(iterator._offset, offset) + self.assertEqual(iterator._end_cursor, end_cursor) + self.assertTrue(iterator._more_results) + + class Test__pb_from_query(unittest.TestCase): def _callFUT(self, query): From 59681fb3e462e8b090793fdeed379f59ce87e3ba Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 22:51:17 -0800 Subject: [PATCH 033/611] Adding helper for datastore iterator to convert entities. 
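The base iterator machinery calls item_to_value(iterator, raw_item) for every
raw item in a fetched page, passing the iterator along so converters can reach
client state when they need it. A simplified sketch of that contract follows;
the real Page class in google.cloud.iterator carries more state than this:

    class PageSketch(object):
        """Illustrative stand-in for google.cloud.iterator.Page."""

        def __init__(self, parent, items, item_to_value):
            self._parent = parent
            self._items = iter(items)
            self._item_to_value = item_to_value

        def __iter__(self):
            return self

        def __next__(self):
            raw = next(self._items)  # StopIteration ends the page.
            return self._item_to_value(self._parent, raw)

        next = __next__  # Python 2 spelling.

_item_to_entity itself has no use for the iterator argument, hence the pylint
unused-argument pragma in the hunk below.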
--- .../google/cloud/datastore/query.py | 20 ++++++++++++- .../unit_tests/test_query.py | 28 +++++++++++++++++-- 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 3edc06a18f5e..d62933e7928e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -398,7 +398,7 @@ class AltIterator(BaseIterator): def __init__(self, query, client, limit=None, offset=None, start_cursor=None, end_cursor=None): super(AltIterator, self).__init__( - client=client, item_to_value=None, + client=client, item_to_value=_item_to_entity, page_token=start_cursor, max_results=limit) self._query = query self._offset = offset @@ -616,3 +616,21 @@ def _pb_from_query(query): pb.distinct_on.add().name = distinct_on_name return pb + + +# pylint: disable=unused-argument +def _item_to_entity(iterator, entity_pb): + """Convert a raw protobuf entity to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type entity_pb: + :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :param entity_pb: An entity protobuf to convert to a native entity. + + :rtype: :class:`~google.cloud.datastore.entity.Entity` + :returns: The next entity in the page. + """ + return helpers.entity_from_protobuf(entity_pb) +# pylint: enable=unused-argument diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 468a392e4d86..ce340a69227c 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -627,7 +627,7 @@ def test_constructor_defaults(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) - self.assertIsNone(iterator._item_to_value) + self.assertIsNotNone(iterator._item_to_value) self.assertIsNone(iterator.max_results) self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token,) @@ -650,7 +650,7 @@ def test_constructor_explicit(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) - self.assertIsNone(iterator._item_to_value) + self.assertIsNotNone(iterator._item_to_value) self.assertEqual(iterator.max_results, limit) self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, start_cursor) @@ -661,6 +661,30 @@ def test_constructor_explicit(self): self.assertTrue(iterator._more_results) +class Test__item_to_entity(unittest.TestCase): + + def _callFUT(self, iterator, entity_pb): + from google.cloud.datastore.query import _item_to_entity + return _item_to_entity(iterator, entity_pb) + + def test_it(self): + from google.cloud._testing import _Monkey + from google.cloud.datastore import helpers + + result = object() + entities = [] + + def mocked(entity_pb): + entities.append(entity_pb) + return result + + entity_pb = object() + with _Monkey(helpers, entity_from_protobuf=mocked): + self.assertIs(result, self._callFUT(None, entity_pb)) + + self.assertEqual(entities, [entity_pb]) + + class Test__pb_from_query(unittest.TestCase): def _callFUT(self, query): From 80ca2e0e148d68bed98ab379126265ee6cf49725 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 23:07:21 -0800 Subject: [PATCH 034/611] Adding _build_protobuf() helper to datastore alt. iterator. 
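_build_protobuf() derives each request's limit and offset from running totals
rather than mutating the caller's original values. The arithmetic below
mirrors test__build_protobuf_all_values in the diff that follows:

    import base64

    limit, offset = 15, 9                  # caller's original request
    num_results, skipped_results = 4, 1    # progress after the first page

    assert limit - num_results == 11       # next pb.limit.value
    assert offset - skipped_results == 8   # next pb.offset

    # Page tokens are URL-safe base64; the protobuf wants raw bytes:
    assert base64.urlsafe_b64decode('abcd') == b'i\xb7\x1d'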
--- .../google/cloud/datastore/query.py | 31 ++++++++++++++ .../unit_tests/test_query.py | 40 +++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index d62933e7928e..1f5a7d3608c5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -405,6 +405,37 @@ def __init__(self, query, client, limit=None, offset=None, self._end_cursor = end_cursor # The attributes below will change over the life of the iterator. self._more_results = True + self._skipped_results = 0 + + def _build_protobuf(self): + """Build a query protobuf. + + Relies on the current state of the iterator. + + :rtype: + :class:`google.cloud.datastore._generated.query_pb2.Query` + :returns: The query protobuf object for the current + state of the iterator. + """ + pb = _pb_from_query(self._query) + + start_cursor = self.next_page_token + if start_cursor is not None: + pb.start_cursor = base64.urlsafe_b64decode(start_cursor) + + end_cursor = self._end_cursor + if end_cursor is not None: + pb.end_cursor = base64.urlsafe_b64decode(end_cursor) + + if self.max_results is not None: + pb.limit.value = self.max_results - self.num_results + + if self._offset is not None: + # NOTE: The offset goes down relative to the location + # because we are updating the cursor each time. + pb.offset = self._offset - self._skipped_results + + return pb class Iterator(object): diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index ce340a69227c..ac84cae16978 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -660,6 +660,46 @@ def test_constructor_explicit(self): self.assertEqual(iterator._end_cursor, end_cursor) self.assertTrue(iterator._more_results) + def test__build_protobuf_empty(self): + from google.cloud.datastore._generated import query_pb2 + from google.cloud.datastore.query import Query + + client = _Client(None, None) + query = Query(client) + iterator = self._makeOne(query, client) + + pb = iterator._build_protobuf() + expected_pb = query_pb2.Query() + self.assertEqual(pb, expected_pb) + + def test__build_protobuf_all_values(self): + from google.cloud.datastore._generated import query_pb2 + from google.cloud.datastore.query import Query + + client = _Client(None, None) + query = Query(client) + limit = 15 + offset = 9 + start_bytes = b'i\xb7\x1d' + start_cursor = 'abcd' + end_bytes = b'\xc3\x1c\xb3' + end_cursor = 'wxyz' + iterator = self._makeOne( + query, client, limit=limit, offset=offset, + start_cursor=start_cursor, end_cursor=end_cursor) + self.assertEqual(iterator.max_results, limit) + iterator.num_results = 4 + iterator._skipped_results = 1 + + pb = iterator._build_protobuf() + expected_pb = query_pb2.Query( + start_cursor=start_bytes, + end_cursor=end_bytes, + offset=offset - iterator._skipped_results, + ) + expected_pb.limit.value = limit - iterator.num_results + self.assertEqual(pb, expected_pb) + class Test__item_to_entity(unittest.TestCase): From 9eb4f3ec488541545f5f338a297f60ea2877ca49 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 23:20:18 -0800 Subject: [PATCH 035/611] Adding _process_query_results helper to alt. datastore iterator. 
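The helper also owns the cursor bookkeeping: the wire protocol returns the end
cursor as raw bytes, with an empty value marking the final page, and the
iterator stores a URL-safe base64 page token. The round trip below uses the
same values as the unit test that follows:

    import base64

    cursor_as_bytes = b'\x9ai\xe7'
    token = base64.urlsafe_b64encode(cursor_as_bytes)
    assert token == b'mmnn'
    assert base64.urlsafe_b64decode(token) == cursor_as_bytes
    # An empty b'' cursor from the wire clears next_page_token instead.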
--- .../google/cloud/datastore/query.py | 38 ++++++++++++++ .../unit_tests/test_query.py | 50 +++++++++++++++++++ 2 files changed, 88 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 1f5a7d3608c5..5abcc10d7b4b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -437,6 +437,44 @@ def _build_protobuf(self): return pb + def _process_query_results(self, entity_pbs, cursor_as_bytes, + more_results_enum, skipped_results): + """Process the response from a datastore query. + + :type entity_pbs: iterable + :param entity_pbs: The entities returned in the current page. + + :type cursor_as_bytes: bytes + :param cursor_as_bytes: The end cursor of the query. + + :type more_results_enum: + :class:`._generated.query_pb2.QueryResultBatch.MoreResultsType` + :param more_results_enum: Enum indicating if there are more results. + + :type skipped_results: int + :param skipped_results: The number of skipped results. + + :rtype: iterable + :returns: The next page of entity results. + :raises ValueError: If ``more_results`` is an unexpected value. + """ + self._skipped_results = skipped_results + + if cursor_as_bytes == b'': # Empty-value for bytes. + self.next_page_token = None + else: + self.next_page_token = base64.urlsafe_b64encode(cursor_as_bytes) + self._end_cursor = None + + if more_results_enum == _NOT_FINISHED: + self._more_results = True + elif more_results_enum in _FINISHED: + self._more_results = False + else: + raise ValueError('Unexpected value returned for `more_results`.') + + return entity_pbs + class Iterator(object): """Represent the state of a given execution of a Query. 
diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index ac84cae16978..b49823add13e 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -700,6 +700,56 @@ def test__build_protobuf_all_values(self): expected_pb.limit.value = limit - iterator.num_results self.assertEqual(pb, expected_pb) + def test__process_query_results(self): + from google.cloud.datastore._generated import query_pb2 + + iterator = self._makeOne(None, None, + end_cursor='abcd') + self.assertIsNotNone(iterator._end_cursor) + + entity_pbs = object() + cursor_as_bytes = b'\x9ai\xe7' + cursor = 'mmnn' + skipped_results = 4 + more_results_enum = query_pb2.QueryResultBatch.NOT_FINISHED + result = iterator._process_query_results( + entity_pbs, cursor_as_bytes, + more_results_enum, skipped_results) + self.assertIs(result, entity_pbs) + + self.assertEqual(iterator._skipped_results, skipped_results) + self.assertEqual(iterator.next_page_token, cursor) + self.assertTrue(iterator._more_results) + + def test__process_query_results_done(self): + from google.cloud.datastore._generated import query_pb2 + + iterator = self._makeOne(None, None, + end_cursor='abcd') + self.assertIsNotNone(iterator._end_cursor) + + entity_pbs = object() + cursor_as_bytes = b'' + skipped_results = 44 + more_results_enum = query_pb2.QueryResultBatch.NO_MORE_RESULTS + result = iterator._process_query_results( + entity_pbs, cursor_as_bytes, + more_results_enum, skipped_results) + self.assertIs(result, entity_pbs) + + self.assertEqual(iterator._skipped_results, skipped_results) + self.assertIsNone(iterator.next_page_token) + self.assertFalse(iterator._more_results) + + def test__process_query_results_bad_enum(self): + from google.cloud.datastore._generated import query_pb2 + + iterator = self._makeOne(None, None) + more_results_enum = 999 + with self.assertRaises(ValueError): + iterator._process_query_results( + None, b'', more_results_enum, None) + class Test__item_to_entity(unittest.TestCase): From 2910421b4e05e2454c6509785497eacb36924744 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 23:35:26 -0800 Subject: [PATCH 036/611] Implement _next_page() on alternate datastore iterator. --- .../google/cloud/datastore/query.py | 23 +++++++++++ .../unit_tests/test_query.py | 40 ++++++++++++++++++- 2 files changed, 62 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5abcc10d7b4b..b4832f75c35a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -18,6 +18,7 @@ from google.cloud._helpers import _ensure_tuple_or_list from google.cloud.iterator import Iterator as BaseIterator +from google.cloud.iterator import Page from google.cloud.datastore._generated import query_pb2 as _query_pb2 from google.cloud.datastore import helpers @@ -475,6 +476,28 @@ def _process_query_results(self, entity_pbs, cursor_as_bytes, return entity_pbs + def _next_page(self): + """Get the next page in the iterator. + + :rtype: :class:`~google.cloud.iterator.Page` + :returns: The next page in the iterator (or :data:`None` if + there are no pages left). 
+ """ + if not self._more_results: + return None + + pb = self._build_protobuf() + transaction = self.client.current_transaction + + query_results = self.client.connection.run_query( + query_pb=pb, + project=self._query.project, + namespace=self._query.namespace, + transaction_id=transaction and transaction.id, + ) + entity_pbs = self._process_query_results(*query_results) + return Page(self, entity_pbs, self._item_to_value) + class Iterator(object): """Represent the state of a given execution of a Query. diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index b49823add13e..1bc0cb256a8f 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -709,7 +709,7 @@ def test__process_query_results(self): entity_pbs = object() cursor_as_bytes = b'\x9ai\xe7' - cursor = 'mmnn' + cursor = b'mmnn' skipped_results = 4 more_results_enum = query_pb2.QueryResultBatch.NOT_FINISHED result = iterator._process_query_results( @@ -750,6 +750,44 @@ def test__process_query_results_bad_enum(self): iterator._process_query_results( None, b'', more_results_enum, None) + def test__next_page(self): + from google.cloud.iterator import Page + from google.cloud.datastore._generated import query_pb2 + from google.cloud.datastore.query import Query + + connection = _Connection() + more_enum = query_pb2.QueryResultBatch.NOT_FINISHED + result = ([], b'', more_enum, 0) + connection._results = [result] + project = 'prujekt' + client = _Client(project, connection) + query = Query(client) + iterator = self._makeOne(query, client) + + page = iterator._next_page() + self.assertIsInstance(page, Page) + self.assertIs(page._parent, iterator) + + self.assertEqual(connection._called_with, [{ + 'query_pb': query_pb2.Query(), + 'project': project, + 'namespace': None, + 'transaction_id': None, + }]) + + def test__next_page_no_more(self): + from google.cloud.datastore.query import Query + + connection = _Connection() + client = _Client(None, connection) + query = Query(client) + iterator = self._makeOne(query, client) + iterator._more_results = False + + page = iterator._next_page() + self.assertIsNone(page) + self.assertEqual(connection._called_with, []) + class Test__item_to_entity(unittest.TestCase): From f818fd5bde968c8ac3c71e5a8223230dfd93e39d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 23:41:40 -0800 Subject: [PATCH 037/611] Removing original Iterator class in datastore. --- .../google/cloud/datastore/client.py | 12 +- .../google/cloud/datastore/query.py | 153 +-------- .../unit_tests/test_query.py | 296 +----------------- 3 files changed, 18 insertions(+), 443 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 4b4a53f70d83..e629032929d5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -456,19 +456,13 @@ def query(self, **kwargs): >>> query = client.query(kind='MyKind') >>> query.add_filter('property', '=', 'val') - Using the query iterator's - :meth:`~google.cloud.datastore.query.Iterator.next_page` method: + Using the query iterator .. code-block:: python >>> query_iter = query.fetch() - >>> entities, more_results, cursor = query_iter.next_page() - >>> entities - [] - >>> more_results - - >>> cursor - + >>> for entity in query_iter: + ... 
do_something(entity) Under the hood this is doing: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index b4832f75c35a..3ea8c3ce4df8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -358,7 +358,7 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, :param client: client used to connect to datastore. If not supplied, uses the query's value. - :rtype: :class:`Iterator` + :rtype: :class:`AltIterator` :returns: The iterator for the query. :raises: ValueError if ``connection`` is not passed and no implicit default has been set. @@ -366,8 +366,9 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, if client is None: client = self._client - return Iterator( - self, client, limit, offset, start_cursor, end_cursor) + return AltIterator( + self, client, limit=limit, offset=offset, + start_cursor=start_cursor, end_cursor=end_cursor) class AltIterator(BaseIterator): @@ -499,152 +500,6 @@ def _next_page(self): return Page(self, entity_pbs, self._item_to_value) -class Iterator(object): - """Represent the state of a given execution of a Query. - - :type query: :class:`google.cloud.datastore.query.Query` - :param query: Query object holding permanent configuration (i.e. - things that don't change on with each page in - a results set). - - :type client: :class:`google.cloud.datastore.client.Client` - :param client: The client used to make a request. - - :type limit: int - :param limit: (Optional) Limit the number of results returned. - - :type offset: int - :param offset: (Optional) Offset used to begin a query. - - :type start_cursor: bytes - :param start_cursor: (Optional) Cursor to begin paging through - query results. - - :type end_cursor: bytes - :param end_cursor: (Optional) Cursor to end paging through - query results. - """ - - def __init__(self, query, client, limit=None, offset=None, - start_cursor=None, end_cursor=None): - self._query = query - self._client = client - self._limit = limit - self._offset = offset - self._start_cursor = start_cursor - self._end_cursor = end_cursor - self._page = self._more_results = None - self._skipped_results = None - - def _build_protobuf(self): - """Build a query protobuf. - - Relies on the current state of the iterator. - - :rtype: `google.cloud.datastore._generated.query_pb2.Query` - :returns: The query protobuf object for the current - state of the iterator. - """ - pb = _pb_from_query(self._query) - - start_cursor = self._start_cursor - if start_cursor is not None: - pb.start_cursor = base64.urlsafe_b64decode(start_cursor) - - end_cursor = self._end_cursor - if end_cursor is not None: - pb.end_cursor = base64.urlsafe_b64decode(end_cursor) - - if self._limit is not None: - pb.limit.value = self._limit - - if self._offset is not None: - pb.offset = self._offset - - return pb - - def _process_query_results(self, entity_pbs, cursor_as_bytes, - more_results_enum, skipped_results): - """Process the response from a datastore query. - - :type entity_pbs: iterable - :param entity_pbs: The entities returned in the current page. - - :type cursor_as_bytes: bytes - :param cursor_as_bytes: The end cursor of the query. - - :type more_results_enum: - :class:`._generated.query_pb2.QueryResultBatch.MoreResultsType` - :param more_results_enum: Enum indicating if there are more results. 
- - :type skipped_results: int - :param skipped_results: The number of skipped results. - - :rtype: list - :returns: The next page of results. - :raises ValueError: If ``more_results`` is an unexpected value. - """ - self._skipped_results = skipped_results - - if cursor_as_bytes == b'': - self._start_cursor = None - else: - self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes) - self._end_cursor = None - - if more_results_enum == _NOT_FINISHED: - self._more_results = True - elif more_results_enum in _FINISHED: - self._more_results = False - else: - raise ValueError('Unexpected value returned for `more_results`.') - - page = [ - helpers.entity_from_protobuf(entity) - for entity in entity_pbs] - return page - - def next_page(self): - """Fetch a single "page" of query results. - - Low-level API for fine control: the more convenient API is - to iterate on the current Iterator. - - :rtype: tuple, (entities, more_results, cursor) - :returns: The next page of results. - """ - pb = self._build_protobuf() - transaction = self._client.current_transaction - - query_results = self._client.connection.run_query( - query_pb=pb, - project=self._query.project, - namespace=self._query.namespace, - transaction_id=transaction and transaction.id, - ) - self._page = self._process_query_results(*query_results) - return self._page, self._more_results, self._start_cursor - - def __iter__(self): - """Generator yielding all results matching our query. - - :rtype: sequence of :class:`google.cloud.datastore.entity.Entity` - """ - while True: - self.next_page() - for entity in self._page: - yield entity - if not self._more_results: - break - num_results = len(self._page) - if self._limit is not None: - self._limit -= num_results - if self._offset is not None and self._skipped_results is not None: - # NOTE: The offset goes down relative to the location - # because we are updating the cursor each time. - self._offset -= self._skipped_results - - def _pb_from_query(query): """Convert a Query instance to the corresponding protobuf. 
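Before the companion test changes below, note what this removal buys: the hand-rolled paging protocol is gone, so callers no longer unpack an ``(entities, more_results, cursor)`` tuple from ``next_page()``. They iterate the fetched iterator directly, and the base class drives ``_next_page()`` under the hood. A minimal before/after sketch; ``query`` is assumed to be a ``Query`` bound to a client, and ``print`` stands in for whatever the caller does per entity:

    def print_all(query):
        # New API: just iterate; the base Iterator calls _next_page()
        # and tracks cursors, limits, and skipped results itself.
        for entity in query.fetch():
            print(entity)

    def print_first_page_old(query):
        # Old API (deleted in this patch): unpack a tuple per page and
        # loop manually until more_results is falsy.
        iterator = query.fetch()
        entities, more_results, cursor = iterator.next_page()
        for entity in entities:
            print(entity)
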
diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 1bc0cb256a8f..789113fed553 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -309,308 +309,34 @@ def test_distinct_on_multiple_calls(self): self.assertEqual(query.distinct_on, _DISTINCT_ON2) def test_fetch_defaults_w_client_attr(self): + from google.cloud.datastore.query import AltIterator + connection = _Connection() client = self._makeClient(connection) query = self._makeOne(client) iterator = query.fetch() + + self.assertIsInstance(iterator, AltIterator) self.assertIs(iterator._query, query) - self.assertIs(iterator._client, client) - self.assertIsNone(iterator._limit) + self.assertIs(iterator.client, client) + self.assertIsNone(iterator.max_results) self.assertEqual(iterator._offset, 0) def test_fetch_w_explicit_client(self): + from google.cloud.datastore.query import AltIterator + connection = _Connection() client = self._makeClient(connection) other_client = self._makeClient(connection) query = self._makeOne(client) iterator = query.fetch(limit=7, offset=8, client=other_client) + self.assertIsInstance(iterator, AltIterator) self.assertIs(iterator._query, query) - self.assertIs(iterator._client, other_client) - self.assertEqual(iterator._limit, 7) + self.assertIs(iterator.client, other_client) + self.assertEqual(iterator.max_results, 7) self.assertEqual(iterator._offset, 8) -class TestIterator(unittest.TestCase): - _PROJECT = 'PROJECT' - _NAMESPACE = 'NAMESPACE' - _KIND = 'KIND' - _ID = 123 - _START = b'\x00' - _END = b'\xFF' - - def _getTargetClass(self): - from google.cloud.datastore.query import Iterator - return Iterator - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def _addQueryResults(self, connection, cursor=_END, more=False, - skipped_results=None, no_entity=False): - from google.cloud.datastore._generated import entity_pb2 - from google.cloud.datastore._generated import query_pb2 - from google.cloud.datastore.helpers import _new_value_pb - - if more: - more_enum = query_pb2.QueryResultBatch.NOT_FINISHED - else: - more_enum = query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT - _ID = 123 - if no_entity: - entities = [] - else: - entity_pb = entity_pb2.Entity() - entity_pb.key.partition_id.project_id = self._PROJECT - path_element = entity_pb.key.path.add() - path_element.kind = self._KIND - path_element.id = _ID - value_pb = _new_value_pb(entity_pb, 'foo') - value_pb.string_value = u'Foo' - entities = [entity_pb] - - connection._results.append( - (entities, cursor, more_enum, skipped_results)) - - def _makeClient(self, connection=None): - if connection is None: - connection = _Connection() - return _Client(self._PROJECT, connection) - - def test_ctor_defaults(self): - connection = _Connection() - query = object() - iterator = self._makeOne(query, connection) - self.assertIs(iterator._query, query) - self.assertIsNone(iterator._limit) - self.assertIsNone(iterator._offset) - self.assertIsNone(iterator._skipped_results) - - def test_ctor_explicit(self): - client = self._makeClient() - query = _Query(client) - iterator = self._makeOne(query, client, 13, 29) - self.assertIs(iterator._query, query) - self.assertEqual(iterator._limit, 13) - self.assertEqual(iterator._offset, 29) - - def test_next_page_no_cursors_no_more(self): - from google.cloud.datastore.query import _pb_from_query - connection = _Connection() - client = 
self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - self._addQueryResults(connection, cursor=b'') - iterator = self._makeOne(query, client) - entities, more_results, cursor = iterator.next_page() - self.assertIsNone(iterator._skipped_results) - - self.assertIsNone(cursor) - self.assertFalse(more_results) - self.assertFalse(iterator._more_results) - self.assertEqual(len(entities), 1) - self.assertEqual(entities[0].key.path, - [{'kind': self._KIND, 'id': self._ID}]) - self.assertEqual(entities[0]['foo'], u'Foo') - qpb = _pb_from_query(query) - qpb.offset = 0 - EXPECTED = { - 'project': self._PROJECT, - 'query_pb': qpb, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - self.assertEqual(connection._called_with, [EXPECTED]) - - def test_next_page_no_cursors_no_more_w_offset_and_limit(self): - from google.cloud.datastore.query import _pb_from_query - connection = _Connection() - client = self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - skipped_results = object() - self._addQueryResults(connection, cursor=b'', - skipped_results=skipped_results) - iterator = self._makeOne(query, client, 13, 29) - entities, more_results, cursor = iterator.next_page() - - self.assertIsNone(cursor) - self.assertFalse(more_results) - self.assertFalse(iterator._more_results) - self.assertEqual(iterator._skipped_results, skipped_results) - self.assertEqual(len(entities), 1) - self.assertEqual(entities[0].key.path, - [{'kind': self._KIND, 'id': self._ID}]) - self.assertEqual(entities[0]['foo'], u'Foo') - qpb = _pb_from_query(query) - qpb.limit.value = 13 - qpb.offset = 29 - EXPECTED = { - 'project': self._PROJECT, - 'query_pb': qpb, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - self.assertEqual(connection._called_with, [EXPECTED]) - - def test_next_page_w_cursors_w_more(self): - from base64 import urlsafe_b64decode - from base64 import urlsafe_b64encode - from google.cloud.datastore.query import _pb_from_query - connection = _Connection() - client = self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - self._addQueryResults(connection, cursor=self._END, more=True) - iterator = self._makeOne(query, client) - iterator._start_cursor = self._START - iterator._end_cursor = self._END - entities, more_results, cursor = iterator.next_page() - - self.assertEqual(cursor, urlsafe_b64encode(self._END)) - self.assertTrue(more_results) - self.assertTrue(iterator._more_results) - self.assertIsNone(iterator._skipped_results) - self.assertIsNone(iterator._end_cursor) - self.assertEqual(urlsafe_b64decode(iterator._start_cursor), self._END) - self.assertEqual(len(entities), 1) - self.assertEqual(entities[0].key.path, - [{'kind': self._KIND, 'id': self._ID}]) - self.assertEqual(entities[0]['foo'], u'Foo') - qpb = _pb_from_query(query) - qpb.offset = 0 - qpb.start_cursor = urlsafe_b64decode(self._START) - qpb.end_cursor = urlsafe_b64decode(self._END) - EXPECTED = { - 'project': self._PROJECT, - 'query_pb': qpb, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - self.assertEqual(connection._called_with, [EXPECTED]) - - def test_next_page_w_cursors_w_bogus_more(self): - connection = _Connection() - client = self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - self._addQueryResults(connection, cursor=self._END, more=True) - epb, cursor, _, _ = connection._results.pop() - connection._results.append((epb, cursor, 
5, None)) # invalid enum - iterator = self._makeOne(query, client) - self.assertRaises(ValueError, iterator.next_page) - - def test___iter___no_more(self): - from google.cloud.datastore.query import _pb_from_query - connection = _Connection() - client = self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - self._addQueryResults(connection) - iterator = self._makeOne(query, client) - entities = list(iterator) - - self.assertFalse(iterator._more_results) - self.assertEqual(len(entities), 1) - self.assertEqual(entities[0].key.path, - [{'kind': self._KIND, 'id': self._ID}]) - self.assertEqual(entities[0]['foo'], u'Foo') - qpb = _pb_from_query(query) - qpb.offset = 0 - EXPECTED = { - 'project': self._PROJECT, - 'query_pb': qpb, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - self.assertEqual(connection._called_with, [EXPECTED]) - - def test___iter___w_more(self): - from google.cloud.datastore.query import _pb_from_query - connection = _Connection() - client = self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - self._addQueryResults(connection, cursor=self._END, more=True) - self._addQueryResults(connection) - iterator = self._makeOne(query, client) - entities = list(iterator) - - self.assertFalse(iterator._more_results) - self.assertEqual(len(entities), 2) - for entity in entities: - self.assertEqual( - entity.key.path, - [{'kind': self._KIND, 'id': self._ID}]) - self.assertEqual(entities[1]['foo'], u'Foo') - qpb1 = _pb_from_query(query) - qpb2 = _pb_from_query(query) - qpb2.start_cursor = self._END - EXPECTED1 = { - 'project': self._PROJECT, - 'query_pb': qpb1, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - EXPECTED2 = { - 'project': self._PROJECT, - 'query_pb': qpb2, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - self.assertEqual(len(connection._called_with), 2) - self.assertEqual(connection._called_with[0], EXPECTED1) - self.assertEqual(connection._called_with[1], EXPECTED2) - - def test___iter___w_limit(self): - from google.cloud.datastore.query import _pb_from_query - - connection = _Connection() - client = self._makeClient(connection) - query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) - skip1 = 4 - skip2 = 9 - self._addQueryResults(connection, more=True, skipped_results=skip1, - no_entity=True) - self._addQueryResults(connection, more=True, skipped_results=skip2) - self._addQueryResults(connection) - offset = skip1 + skip2 - iterator = self._makeOne(query, client, limit=2, offset=offset) - entities = list(iterator) - - self.assertFalse(iterator._more_results) - self.assertEqual(len(entities), 2) - for entity in entities: - self.assertEqual( - entity.key.path, - [{'kind': self._KIND, 'id': self._ID}]) - qpb1 = _pb_from_query(query) - qpb1.limit.value = 2 - qpb1.offset = offset - qpb2 = _pb_from_query(query) - qpb2.start_cursor = self._END - qpb2.limit.value = 2 - qpb2.offset = offset - skip1 - qpb3 = _pb_from_query(query) - qpb3.start_cursor = self._END - qpb3.limit.value = 1 - EXPECTED1 = { - 'project': self._PROJECT, - 'query_pb': qpb1, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - EXPECTED2 = { - 'project': self._PROJECT, - 'query_pb': qpb2, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - EXPECTED3 = { - 'project': self._PROJECT, - 'query_pb': qpb3, - 'namespace': self._NAMESPACE, - 'transaction_id': None, - } - self.assertEqual(len(connection._called_with), 3) - 
self.assertEqual(connection._called_with[0], EXPECTED1) - self.assertEqual(connection._called_with[1], EXPECTED2) - self.assertEqual(connection._called_with[2], EXPECTED3) - - class TestAltIterator(unittest.TestCase): def _getTargetClass(self): From 39c8b4074b42b7ee4844bc70bcf8b6cea3f765b4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 23:42:35 -0800 Subject: [PATCH 038/611] Renaming datastore AltIterator-->Iterator. --- .../google/cloud/datastore/query.py | 8 ++++---- .../unit_tests/test_query.py | 14 +++++++------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 3ea8c3ce4df8..1781a06bcfd3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -358,7 +358,7 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, :param client: client used to connect to datastore. If not supplied, uses the query's value. - :rtype: :class:`AltIterator` + :rtype: :class:`Iterator` :returns: The iterator for the query. :raises: ValueError if ``connection`` is not passed and no implicit default has been set. @@ -366,12 +366,12 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, if client is None: client = self._client - return AltIterator( + return Iterator( self, client, limit=limit, offset=offset, start_cursor=start_cursor, end_cursor=end_cursor) -class AltIterator(BaseIterator): +class Iterator(BaseIterator): """Represent the state of a given execution of a Query. :type query: :class:`~google.cloud.datastore.query.Query` @@ -399,7 +399,7 @@ class AltIterator(BaseIterator): def __init__(self, query, client, limit=None, offset=None, start_cursor=None, end_cursor=None): - super(AltIterator, self).__init__( + super(Iterator, self).__init__( client=client, item_to_value=_item_to_entity, page_token=start_cursor, max_results=limit) self._query = query diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 789113fed553..c21d0bc7e580 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -309,39 +309,39 @@ def test_distinct_on_multiple_calls(self): self.assertEqual(query.distinct_on, _DISTINCT_ON2) def test_fetch_defaults_w_client_attr(self): - from google.cloud.datastore.query import AltIterator + from google.cloud.datastore.query import Iterator connection = _Connection() client = self._makeClient(connection) query = self._makeOne(client) iterator = query.fetch() - self.assertIsInstance(iterator, AltIterator) + self.assertIsInstance(iterator, Iterator) self.assertIs(iterator._query, query) self.assertIs(iterator.client, client) self.assertIsNone(iterator.max_results) self.assertEqual(iterator._offset, 0) def test_fetch_w_explicit_client(self): - from google.cloud.datastore.query import AltIterator + from google.cloud.datastore.query import Iterator connection = _Connection() client = self._makeClient(connection) other_client = self._makeClient(connection) query = self._makeOne(client) iterator = query.fetch(limit=7, offset=8, client=other_client) - self.assertIsInstance(iterator, AltIterator) + self.assertIsInstance(iterator, Iterator) self.assertIs(iterator._query, query) self.assertIs(iterator.client, other_client) 
self.assertEqual(iterator.max_results, 7) self.assertEqual(iterator._offset, 8) -class TestAltIterator(unittest.TestCase): +class TestIterator(unittest.TestCase): def _getTargetClass(self): - from google.cloud.datastore.query import AltIterator - return AltIterator + from google.cloud.datastore.query import Iterator + return Iterator def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) From cdb0b1ccf46457c8cbb1f35b844b825fedeae677 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 09:40:39 -0800 Subject: [PATCH 039/611] Pylint fixes for datastore. Removing an unused import and also adding a class-level attribute for ``next_page_token`` which gets set outside of the constructor (Pylint seems to miss the fact that it gets set in the constructor of the base class). --- packages/google-cloud-datastore/google/cloud/datastore/query.py | 2 ++ packages/google-cloud-datastore/unit_tests/test_query.py | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 1781a06bcfd3..a9488db725af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -397,6 +397,8 @@ class Iterator(BaseIterator): query results. """ + next_page_token = None + def __init__(self, query, client, limit=None, offset=None, start_cursor=None, end_cursor=None): super(Iterator, self).__init__( diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index c21d0bc7e580..7c2d2238410d 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -468,8 +468,6 @@ def test__process_query_results_done(self): self.assertFalse(iterator._more_results) def test__process_query_results_bad_enum(self): - from google.cloud.datastore._generated import query_pb2 - iterator = self._makeOne(None, None) more_results_enum = 999 with self.assertRaises(ValueError): From 873b889578fab4d00117c4b4ce4fb4d526a6db29 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 10:02:22 -0800 Subject: [PATCH 040/611] Adding doc snippet for paging with cursors. --- .../google/cloud/datastore/client.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index e629032929d5..fc9cbf2ea321 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -464,6 +464,23 @@ def query(self, **kwargs): >>> for entity in query_iter: ... do_something(entity) + or manually page through results + + .. code-block:: python + + >>> query_iter = query.fetch(start_cursor='2mdd223i944') + >>> pages = query_iter.pages + >>> + >>> first_page = next(pages) + >>> first_page_entities = list(first_page) + >>> query_iter.next_page_token + 'abc-some-cursor' + >>> + >>> second_page = next(pages) + >>> second_page_entities = list(second_page) + >>> query_iter.next_page_token is None + True + Under the hood this is doing: .. code-block:: python From c2ae88a2a8a4f3617f0aef76289b9d246df28651 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:20:59 -0800 Subject: [PATCH 041/611] Renaming _getTargetClass to _get_target_class. 
Done via: $ git grep -l 'def _getTargetClass(self)' | \ > xargs sed -i s/'def _getTargetClass(self)'/'@staticmethod\n def _get_target_class()'/g --- packages/google-cloud-datastore/unit_tests/test__http.py | 9 ++++++--- packages/google-cloud-datastore/unit_tests/test_batch.py | 3 ++- .../google-cloud-datastore/unit_tests/test_client.py | 3 ++- .../google-cloud-datastore/unit_tests/test_entity.py | 3 ++- .../google-cloud-datastore/unit_tests/test_helpers.py | 3 ++- packages/google-cloud-datastore/unit_tests/test_key.py | 3 ++- packages/google-cloud-datastore/unit_tests/test_query.py | 6 ++++-- .../unit_tests/test_transaction.py | 3 ++- 8 files changed, 22 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index c7f35c3b4772..272da39c2848 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -19,7 +19,8 @@ class Test_DatastoreAPIOverHttp(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore._http import _DatastoreAPIOverHttp return _DatastoreAPIOverHttp @@ -174,7 +175,8 @@ def test_commit_failure_non_grpc_err(self): class Test_DatastoreAPIOverGRPC(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore._http import _DatastoreAPIOverGRPC return _DatastoreAPIOverGRPC @@ -350,7 +352,8 @@ def test_allocate_ids(self): class TestConnection(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore._http import Connection return Connection diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index e7ce9dd609b0..1aae623868c3 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -17,7 +17,8 @@ class TestBatch(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.batch import Batch return Batch diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 79dd7b6b8b05..624f4d6db122 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -128,7 +128,8 @@ def tearDown(self): KLASS = self._getTargetClass() KLASS._connection_class = self.original_cnxn_class - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.client import Client return Client diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/unit_tests/test_entity.py index 3a5674a85cd4..0aac11d6d263 100644 --- a/packages/google-cloud-datastore/unit_tests/test_entity.py +++ b/packages/google-cloud-datastore/unit_tests/test_entity.py @@ -21,7 +21,8 @@ class TestEntity(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.entity import Entity return Entity diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py index 5d6a90eed0ff..c2859b9907cb 100644 --- a/packages/google-cloud-datastore/unit_tests/test_helpers.py +++ 
b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -874,7 +874,8 @@ def test_array_value_meaning_partially_unset(self): class TestGeoPoint(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.helpers import GeoPoint return GeoPoint diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py index 22aaefb27656..15c491d044c2 100644 --- a/packages/google-cloud-datastore/unit_tests/test_key.py +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -19,7 +19,8 @@ class TestKey(unittest.TestCase): _DEFAULT_PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.key import Key return Key diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 7c2d2238410d..0e71d74304de 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -19,7 +19,8 @@ class TestQuery(unittest.TestCase): _PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.query import Query return Query @@ -339,7 +340,8 @@ def test_fetch_w_explicit_client(self): class TestIterator(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.query import Iterator return Iterator diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index 8b28ee0cb277..a4cf9ca87a8d 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -17,7 +17,8 @@ class TestTransaction(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.datastore.transaction import Transaction return Transaction From a7bdb5c4ca0821e37bd6b1121e3adbe9857697dd Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:22:12 -0800 Subject: [PATCH 042/611] Changing uses of _getTargetClass to _get_target_class. 
Done via: $ git grep -l _getTargetClass | \ > xargs sed -i s/_getTargetClass/_get_target_class/g --- packages/google-cloud-datastore/unit_tests/test__http.py | 8 ++++---- packages/google-cloud-datastore/unit_tests/test_batch.py | 4 ++-- packages/google-cloud-datastore/unit_tests/test_client.py | 8 ++++---- packages/google-cloud-datastore/unit_tests/test_entity.py | 4 ++-- .../google-cloud-datastore/unit_tests/test_helpers.py | 2 +- packages/google-cloud-datastore/unit_tests/test_key.py | 4 ++-- packages/google-cloud-datastore/unit_tests/test_query.py | 4 ++-- .../google-cloud-datastore/unit_tests/test_transaction.py | 4 ++-- 8 files changed, 19 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 272da39c2848..fd1cb08a3ee2 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -25,7 +25,7 @@ def _get_target_class(): return _DatastoreAPIOverHttp def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test__rpc(self): class ReqPB(object): @@ -201,7 +201,7 @@ def mock_make_stub(*args): else: to_monkey = {'make_insecure_stub': mock_make_stub} with _Monkey(MUT, **to_monkey): - return self._getTargetClass()(connection, secure) + return self._get_target_class()(connection, secure) def test_constructor(self): from google.cloud.datastore import _http as MUT @@ -375,7 +375,7 @@ def _makeOne(self, credentials=None, http=None, use_grpc=False): from google.cloud._testing import _Monkey from google.cloud.datastore import _http as MUT with _Monkey(MUT, _USE_GRPC=use_grpc): - return self._getTargetClass()(credentials=credentials, http=http) + return self._get_target_class()(credentials=credentials, http=http) def _verifyProtobufCall(self, called_with, URI, conn): self.assertEqual(called_with['uri'], URI) @@ -386,7 +386,7 @@ def _verifyProtobufCall(self, called_with, URI, conn): conn.USER_AGENT) def test_default_url(self): - klass = self._getTargetClass() + klass = self._get_target_class() conn = self._makeOne() self.assertEqual(conn.api_base_url, klass.API_BASE_URL) diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index 1aae623868c3..05180500918d 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -24,7 +24,7 @@ def _get_target_class(): return Batch def _makeOne(self, client): - return self._getTargetClass()(client) + return self._get_target_class()(client) def test_ctor(self): from google.cloud.datastore._generated import datastore_pb2 @@ -348,7 +348,7 @@ def test_as_context_mgr_w_error(self): self.assertEqual(connection._committed, []) def test_as_context_mgr_enter_fails(self): - klass = self._getTargetClass() + klass = self._get_target_class() class FailedBegin(klass): diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 624f4d6db122..dc7b9e66c34c 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -120,12 +120,12 @@ class TestClient(unittest.TestCase): PROJECT = 'PROJECT' def setUp(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() self.original_cnxn_class = KLASS._connection_class 
KLASS._connection_class = _MockConnection def tearDown(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() KLASS._connection_class = self.original_cnxn_class @staticmethod @@ -135,7 +135,7 @@ def _get_target_class(): def _makeOne(self, project=PROJECT, namespace=None, credentials=None, http=None): - return self._getTargetClass()(project=project, + return self._get_target_class()(project=project, namespace=namespace, credentials=credentials, http=http) @@ -162,7 +162,7 @@ def fallback_mock(project): default_called.append(project) return project or OTHER - klass = self._getTargetClass() + klass = self._get_target_class() with _Monkey(_MUT, _determine_default_project=fallback_mock): with _Monkey(_base_client, diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/unit_tests/test_entity.py index 0aac11d6d263..6dd91f460886 100644 --- a/packages/google-cloud-datastore/unit_tests/test_entity.py +++ b/packages/google-cloud-datastore/unit_tests/test_entity.py @@ -27,11 +27,11 @@ def _get_target_class(): return Entity def _makeOne(self, key=None, exclude_from_indexes=()): - klass = self._getTargetClass() + klass = self._get_target_class() return klass(key=key, exclude_from_indexes=exclude_from_indexes) def test_ctor_defaults(self): - klass = self._getTargetClass() + klass = self._get_target_class() entity = klass() self.assertIsNone(entity.key) self.assertIsNone(entity.kind) diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py index c2859b9907cb..15713a21b8e1 100644 --- a/packages/google-cloud-datastore/unit_tests/test_helpers.py +++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -880,7 +880,7 @@ def _get_target_class(): return GeoPoint def _makeOne(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def test_constructor(self): lat = 81.2 diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py index 15c491d044c2..b8713663e92a 100644 --- a/packages/google-cloud-datastore/unit_tests/test_key.py +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -25,13 +25,13 @@ def _get_target_class(): return Key def _makeOne(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def test_ctor_empty(self): self.assertRaises(ValueError, self._makeOne) def test_ctor_no_project(self): - klass = self._getTargetClass() + klass = self._get_target_class() self.assertRaises(ValueError, klass, 'KIND') def test_ctor_w_explicit_project_empty_path(self): diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 0e71d74304de..f3fdc407f7d6 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -25,7 +25,7 @@ def _get_target_class(): return Query def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def _makeClient(self, connection=None): if connection is None: @@ -346,7 +346,7 @@ def _get_target_class(): return Iterator def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_constructor_defaults(self): query = object() diff --git 
a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index a4cf9ca87a8d..2d12d7405eb6 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -23,7 +23,7 @@ def _get_target_class(): return Transaction def _makeOne(self, client, **kw): - return self._getTargetClass()(client, **kw) + return self._get_target_class()(client, **kw) def test_ctor_defaults(self): from google.cloud.datastore._generated import datastore_pb2 @@ -35,7 +35,7 @@ def test_ctor_defaults(self): self.assertEqual(xact.project, _PROJECT) self.assertEqual(xact.connection, connection) self.assertIsNone(xact.id) - self.assertEqual(xact._status, self._getTargetClass()._INITIAL) + self.assertEqual(xact._status, self._get_target_class()._INITIAL) self.assertIsInstance(xact._commit_request, datastore_pb2.CommitRequest) self.assertIs(xact.mutations, xact._commit_request.mutations) From 0ddcd476f27f70a212f1f33836bcbd950c09a694 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:05:35 -0800 Subject: [PATCH 043/611] Changing all instances of _makeOne to _make_one. Done via: $ git grep -l _makeOne | \ > xargs sed -i s/_makeOne/_make_one/g --- .../unit_tests/test__http.py | 90 ++++++------ .../unit_tests/test_batch.py | 48 +++---- .../unit_tests/test_client.py | 86 +++++------ .../unit_tests/test_entity.py | 56 ++++---- .../unit_tests/test_helpers.py | 20 +-- .../unit_tests/test_key.py | 136 +++++++++--------- .../unit_tests/test_query.py | 100 ++++++------- .../unit_tests/test_transaction.py | 24 ++-- 8 files changed, 280 insertions(+), 280 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index fd1cb08a3ee2..64c6a6258122 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -24,7 +24,7 @@ def _get_target_class(): from google.cloud.datastore._http import _DatastoreAPIOverHttp return _DatastoreAPIOverHttp - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test__rpc(self): @@ -47,7 +47,7 @@ def FromString(cls, pb): METHOD = 'METHOD' URI = 'http://api-url' conn = _Connection(URI) - datastore_api = self._makeOne(conn) + datastore_api = self._make_one(conn) http = conn.http = Http({'status': '200'}, 'CONTENT') response = datastore_api._rpc(PROJECT, METHOD, ReqPB(), RspPB) self.assertIsInstance(response, RspPB) @@ -69,7 +69,7 @@ def test__request_w_200(self): DATA = b'DATA' URI = 'http://api-url' conn = _Connection(URI) - datastore_api = self._makeOne(conn) + datastore_api = self._make_one(conn) http = conn.http = Http({'status': '200'}, 'CONTENT') self.assertEqual(datastore_api._request(PROJECT, METHOD, DATA), 'CONTENT') @@ -97,7 +97,7 @@ def test__request_not_200(self): DATA = 'DATA' URI = 'http://api-url' conn = _Connection(URI) - datastore_api = self._makeOne(conn) + datastore_api = self._make_one(conn) conn.http = Http({'status': '400'}, error.SerializeToString()) with self.assertRaises(BadRequest) as exc: datastore_api._request(PROJECT, METHOD, DATA) @@ -180,7 +180,7 @@ def _get_target_class(): from google.cloud.datastore._http import _DatastoreAPIOverGRPC return _DatastoreAPIOverGRPC - def _makeOne(self, stub, connection=None, secure=True, mock_args=None): + def _make_one(self, stub, connection=None, secure=True, 
mock_args=None): from google.cloud._testing import _Monkey from google.cloud.datastore import _http as MUT @@ -212,7 +212,7 @@ def test_constructor(self): stub = _GRPCStub() mock_args = [] - datastore_api = self._makeOne(stub, connection=conn, + datastore_api = self._make_one(stub, connection=conn, mock_args=mock_args) self.assertIs(datastore_api._stub, stub) @@ -232,7 +232,7 @@ def test_constructor_insecure(self): stub = _GRPCStub() mock_args = [] - datastore_api = self._makeOne(stub, connection=conn, + datastore_api = self._make_one(stub, connection=conn, secure=False, mock_args=mock_args) self.assertIs(datastore_api._stub, stub) @@ -245,7 +245,7 @@ def test_constructor_insecure(self): def test_lookup(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -258,7 +258,7 @@ def test_lookup(self): def test_run_query(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -270,7 +270,7 @@ def test_run_query(self): def _run_query_failure_helper(self, exc, err_class): stub = _GRPCStub(side_effect=exc) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -298,7 +298,7 @@ def test_run_query_invalid_argument(self): def test_begin_transaction(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -312,7 +312,7 @@ def test_begin_transaction(self): def test_commit_success(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -325,7 +325,7 @@ def test_commit_success(self): def test_rollback(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -338,7 +338,7 @@ def test_rollback(self): def test_allocate_ids(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._makeOne(stub=stub) + datastore_api = self._make_one(stub=stub) request_pb = _RequestPB() project = 'PROJECT' @@ -371,7 +371,7 @@ def _make_query_pb(self, kind): pb.kind.add().name = kind return pb - def _makeOne(self, credentials=None, http=None, use_grpc=False): + def _make_one(self, credentials=None, http=None, use_grpc=False): from google.cloud._testing import _Monkey from google.cloud.datastore import _http as MUT with _Monkey(MUT, _USE_GRPC=use_grpc): @@ -387,7 +387,7 @@ def _verifyProtobufCall(self, called_with, URI, conn): def test_default_url(self): klass = self._get_target_class() - conn = self._makeOne() + conn = self._make_one() self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): @@ -400,13 +400,13 @@ def test_custom_url_from_env(self): fake_environ = {GCD_HOST: HOST} with _Monkey(os, environ=fake_environ): - conn = self._makeOne() + conn = self._make_one() self.assertNotEqual(conn.api_base_url, API_BASE_URL) self.assertEqual(conn.api_base_url, 'http://' + HOST) def test_ctor_defaults(self): - conn = self._makeOne() + conn = self._make_one() self.assertIsNone(conn.credentials) def 
test_ctor_without_grpc(self): @@ -421,7 +421,7 @@ def mock_api(connection): return return_val with _Monkey(MUT, _DatastoreAPIOverHttp=mock_api): - conn = self._makeOne(use_grpc=False) + conn = self._make_one(use_grpc=False) self.assertIsNone(conn.credentials) self.assertIs(conn._datastore_api, return_val) @@ -439,7 +439,7 @@ def mock_api(connection, secure): return return_val with _Monkey(MUT, _DatastoreAPIOverGRPC=mock_api): - conn = self._makeOne(use_grpc=True) + conn = self._make_one(use_grpc=True) self.assertIsNone(conn.credentials) self.assertIs(conn._datastore_api, return_val) @@ -452,18 +452,18 @@ def create_scoped_required(self): return False creds = Creds() - conn = self._makeOne(creds) + conn = self._make_one(creds) self.assertIs(conn.credentials, creds) def test_http_w_existing(self): - conn = self._makeOne() + conn = self._make_one() conn._http = http = object() self.assertIs(conn.http, http) def test_http_wo_creds(self): import httplib2 - conn = self._makeOne() + conn = self._make_one() self.assertIsInstance(conn.http, httplib2.Http) def test_http_w_creds(self): @@ -481,14 +481,14 @@ def create_scoped_required(self): return False creds = Creds() - conn = self._makeOne(creds) + conn = self._make_one(creds) self.assertIs(conn.http, authorized) self.assertIsInstance(creds._called_with, httplib2.Http) def test_build_api_url_w_default_base_version(self): PROJECT = 'PROJECT' METHOD = 'METHOD' - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -502,7 +502,7 @@ def test_build_api_url_w_explicit_base_version(self): VER = '3.1415926' PROJECT = 'PROJECT' METHOD = 'METHOD' - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ BASE, VER, @@ -518,7 +518,7 @@ def test_lookup_single_key_empty_response(self): PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -545,7 +545,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -574,7 +574,7 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' key_pb = self._make_key_pb(PROJECT) - conn = self._makeOne() + conn = self._make_one() self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb, eventual=True, transaction_id=TRANSACTION) @@ -585,7 +585,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): TRANSACTION = b'TRANSACTION' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -618,7 +618,7 @@ def test_lookup_single_key_nonempty_response(self): entity = entity_pb2.Entity() entity.key.CopyFrom(key_pb) rsp_pb.found.add(entity=entity) - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -647,7 +647,7 @@ def test_lookup_multiple_keys_empty_response(self): key_pb1 = self._make_key_pb(PROJECT) key_pb2 = self._make_key_pb(PROJECT, id_=2345) rsp_pb = datastore_pb2.LookupResponse() - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -680,7 +680,7 @@ def test_lookup_multiple_keys_w_missing(self): 
er_1.entity.key.CopyFrom(key_pb1) er_2 = rsp_pb.missing.add() er_2.entity.key.CopyFrom(key_pb2) - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -712,7 +712,7 @@ def test_lookup_multiple_keys_w_deferred(self): rsp_pb = datastore_pb2.LookupResponse() rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -752,7 +752,7 @@ def test_run_query_w_eventual_no_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -791,7 +791,7 @@ def test_run_query_wo_eventual_w_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -831,7 +831,7 @@ def test_run_query_w_eventual_and_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._makeOne() + conn = self._make_one() self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb, eventual=True, transaction_id=TRANSACTION) @@ -848,7 +848,7 @@ def test_run_query_wo_namespace_empty_result(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -881,7 +881,7 @@ def test_run_query_w_namespace_nonempty_result(self): rsp_pb.batch.entity_results.add(entity=entity) rsp_pb.batch.entity_result_type = 1 # FULL rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -906,7 +906,7 @@ def test_begin_transaction(self): TRANSACTION = b'TRANSACTION' rsp_pb = datastore_pb2.BeginTransactionResponse() rsp_pb.transaction = TRANSACTION - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -936,7 +936,7 @@ def test_commit_wo_transaction(self): insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -982,7 +982,7 @@ def test_commit_w_transaction(self): insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -1019,7 +1019,7 @@ def test_rollback_ok(self): TRANSACTION = b'xact' rsp_pb = datastore_pb2.RollbackResponse() - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -1040,7 +1040,7 @@ def test_allocate_ids_empty(self): PROJECT = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -1071,7 +1071,7 @@ def test_allocate_ids_non_empty(self): rsp_pb = datastore_pb2.AllocateIdsResponse() 
rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index 05180500918d..737668af02b4 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -23,7 +23,7 @@ def _get_target_class(): return Batch - def _makeOne(self, client): + def _make_one(self, client): return self._get_target_class()(client) def test_ctor(self): @@ -32,7 +32,7 @@ def test_ctor(self): _NAMESPACE = 'NAMESPACE' connection = _Connection() client = _Client(_PROJECT, connection, _NAMESPACE) - batch = self._makeOne(client) + batch = self._make_one(client) self.assertEqual(batch.project, _PROJECT) self.assertEqual(batch.connection, connection) @@ -48,8 +48,8 @@ def test_current(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch1 = self._makeOne(client) - batch2 = self._makeOne(client) + batch1 = self._make_one(client) + batch2 = self._make_one(client) self.assertIsNone(batch1.current()) self.assertIsNone(batch2.current()) with batch1: @@ -67,7 +67,7 @@ def test_put_entity_wo_key(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) batch.begin() self.assertRaises(ValueError, batch.put, _Entity()) @@ -76,7 +76,7 @@ def test_put_entity_wrong_status(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) entity = _Entity() entity.key = _Key('OTHER') @@ -87,7 +87,7 @@ def test_put_entity_w_key_wrong_project(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) entity = _Entity() entity.key = _Key('OTHER') @@ -99,7 +99,7 @@ def test_put_entity_w_partial_key(self): _PROPERTIES = {'foo': 'bar'} connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) entity = _Entity(_PROPERTIES) key = entity.key = _Key(_PROJECT) key._id = None @@ -123,7 +123,7 @@ def test_put_entity_w_completed_key(self): } connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) entity = _Entity(_PROPERTIES) entity.exclude_from_indexes = ('baz', 'spam') key = entity.key = _Key(_PROJECT) @@ -149,7 +149,7 @@ def test_delete_wrong_status(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) key = _Key(_PROJECT) key._id = None @@ -160,7 +160,7 @@ def test_delete_w_partial_key(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) key = _Key(_PROJECT) key._id = None @@ -171,7 +171,7 @@ def test_delete_w_key_wrong_project(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) key = _Key('OTHER') batch.begin() @@ -181,7 +181,7 @@ def test_delete_w_completed_key(self): _PROJECT = 'PROJECT' connection = _Connection() 
client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) key = _Key(_PROJECT) batch.begin() @@ -193,7 +193,7 @@ def test_delete_w_completed_key(self): def test_begin(self): _PROJECT = 'PROJECT' client = _Client(_PROJECT, None) - batch = self._makeOne(client) + batch = self._make_one(client) self.assertEqual(batch._status, batch._INITIAL) batch.begin() self.assertEqual(batch._status, batch._IN_PROGRESS) @@ -201,7 +201,7 @@ def test_begin(self): def test_begin_fail(self): _PROJECT = 'PROJECT' client = _Client(_PROJECT, None) - batch = self._makeOne(client) + batch = self._make_one(client) batch._status = batch._IN_PROGRESS with self.assertRaises(ValueError): batch.begin() @@ -209,7 +209,7 @@ def test_begin_fail(self): def test_rollback(self): _PROJECT = 'PROJECT' client = _Client(_PROJECT, None) - batch = self._makeOne(client) + batch = self._make_one(client) batch.begin() self.assertEqual(batch._status, batch._IN_PROGRESS) batch.rollback() @@ -218,7 +218,7 @@ def test_rollback(self): def test_rollback_wrong_status(self): _PROJECT = 'PROJECT' client = _Client(_PROJECT, None) - batch = self._makeOne(client) + batch = self._make_one(client) self.assertEqual(batch._status, batch._INITIAL) self.assertRaises(ValueError, batch.rollback) @@ -227,7 +227,7 @@ def test_commit(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) self.assertEqual(batch._status, batch._INITIAL) batch.begin() @@ -242,7 +242,7 @@ def test_commit_wrong_status(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) self.assertEqual(batch._status, batch._INITIAL) self.assertRaises(ValueError, batch.commit) @@ -252,7 +252,7 @@ def test_commit_w_partial_key_entities(self): _NEW_ID = 1234 connection = _Connection(_NEW_ID) client = _Client(_PROJECT, connection) - batch = self._makeOne(client) + batch = self._make_one(client) entity = _Entity({}) key = entity.key = _Key(_PROJECT) key._id = None @@ -279,7 +279,7 @@ def test_as_context_mgr_wo_error(self): client = _Client(_PROJECT, connection) self.assertEqual(list(client._batches), []) - with self._makeOne(client) as batch: + with self._make_one(client) as batch: self.assertEqual(list(client._batches), [batch]) batch.put(entity) @@ -302,10 +302,10 @@ def test_as_context_mgr_nested(self): client = _Client(_PROJECT, connection) self.assertEqual(list(client._batches), []) - with self._makeOne(client) as batch1: + with self._make_one(client) as batch1: self.assertEqual(list(client._batches), [batch1]) batch1.put(entity1) - with self._makeOne(client) as batch2: + with self._make_one(client) as batch2: self.assertEqual(list(client._batches), [batch2, batch1]) batch2.put(entity2) @@ -334,7 +334,7 @@ def test_as_context_mgr_w_error(self): self.assertEqual(list(client._batches), []) try: - with self._makeOne(client) as batch: + with self._make_one(client) as batch: self.assertEqual(list(client._batches), [batch]) batch.put(entity) raise ValueError("testing") diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index dc7b9e66c34c..6be62a95d20b 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -133,7 +133,7 @@ def _get_target_class(): from google.cloud.datastore.client import 
Client return Client - def _makeOne(self, project=PROJECT, namespace=None, + def _make_one(self, project=PROJECT, namespace=None, credentials=None, http=None): return self._get_target_class()(project=project, namespace=namespace, @@ -147,7 +147,7 @@ def test_ctor_w_project_no_environ(self): # Some environments (e.g. AppVeyor CI) run in GCE, so # this test would fail artificially. with _Monkey(_MUT, _base_default_project=lambda project: None): - self.assertRaises(EnvironmentError, self._makeOne, None) + self.assertRaises(EnvironmentError, self._make_one, None) def test_ctor_w_implicit_inputs(self): from google.cloud._testing import _Monkey @@ -182,7 +182,7 @@ def test_ctor_w_explicit_inputs(self): NAMESPACE = 'namespace' creds = object() http = object() - client = self._makeOne(project=OTHER, + client = self._make_one(project=OTHER, namespace=NAMESPACE, credentials=creds, http=http) @@ -196,7 +196,7 @@ def test_ctor_w_explicit_inputs(self): def test__push_batch_and__pop_batch(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) batch = client.batch() xact = client.transaction() client._push_batch(batch) @@ -221,7 +221,7 @@ def _get_multi(*args, **kw): return [] creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.get_multi = _get_multi key = object() @@ -244,7 +244,7 @@ def _get_multi(*args, **kw): return [_entity] creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.get_multi = _get_multi key, missing, deferred = object(), [], [] @@ -259,7 +259,7 @@ def _get_multi(*args, **kw): def test_get_multi_no_keys(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) results = client.get_multi([]) self.assertEqual(results, []) @@ -267,7 +267,7 @@ def test_get_multi_miss(self): from google.cloud.datastore.key import Key creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._add_lookup_result() key = Key('Kind', 1234, project=self.PROJECT) results = client.get_multi([key]) @@ -288,7 +288,7 @@ def test_get_multi_miss_w_missing(self): path_element.id = ID creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) # Set missing entity on mock connection. client.connection._add_lookup_result(missing=[missed]) @@ -303,7 +303,7 @@ def test_get_multi_w_missing_non_empty(self): from google.cloud.datastore.key import Key creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) key = Key('Kind', 1234, project=self.PROJECT) missing = ['this', 'list', 'is', 'not', 'empty'] @@ -314,7 +314,7 @@ def test_get_multi_w_deferred_non_empty(self): from google.cloud.datastore.key import Key creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) key = Key('Kind', 1234, project=self.PROJECT) deferred = ['this', 'list', 'is', 'not', 'empty'] @@ -328,7 +328,7 @@ def test_get_multi_miss_w_deferred(self): # Set deferred entity on mock connection. 
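The `test_get` and `test_get_miss` cases above verify delegation rather than behavior: `client.get()` must hand its arguments through to `client.get_multi()`, so the tests shadow the bound method with a recording stub set on the instance. The idiom, reduced to a self-contained sketch (the `Service` class here is illustrative, not part of the library):

    class Service(object):
        def get_multi(self, keys):
            raise NotImplementedError  # the real work, stubbed out below

        def get(self, key):
            # Delegation under test: get() is sugar over get_multi().
            results = self.get_multi([key])
            return results[0] if results else None

    _called_with = []

    def _fake_get_multi(keys):
        _called_with.append(keys)
        return []

    service = Service()
    service.get_multi = _fake_get_multi  # instance attribute shadows the method
    assert service.get('some-key') is None
    assert _called_with == [['some-key']]

Because the stub lives on the instance, no teardown is needed; the class itself is never modified.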
creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._add_lookup_result(deferred=[key.to_protobuf()]) deferred = [] @@ -353,7 +353,7 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): entity2_pb.key.CopyFrom(key2_pb) creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) # mock up two separate requests client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) client.connection._add_lookup_result([entity2_pb]) @@ -402,7 +402,7 @@ def test_get_multi_hit(self): # Make a connection to return the entity pb. creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._add_lookup_result([entity_pb]) key = Key(KIND, ID, project=self.PROJECT) @@ -429,7 +429,7 @@ def test_get_multi_hit_w_transaction(self): # Make a connection to return the entity pb. creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._add_lookup_result([entity_pb]) key = Key(KIND, ID, project=self.PROJECT) @@ -463,7 +463,7 @@ def test_get_multi_hit_multiple_keys_same_project(self): # Make a connection to return the entity pbs. creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._add_lookup_result([entity_pb1, entity_pb2]) key1 = Key(KIND, ID1, project=self.PROJECT) @@ -489,7 +489,7 @@ def test_get_multi_hit_multiple_keys_different_project(self): key2 = Key('KIND', 1234, project=PROJECT2) creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) with self.assertRaises(ValueError): client.get_multi([key1, key2]) @@ -507,7 +507,7 @@ def test_get_multi_max_loops(self): # Make a connection to return the entity pb. 
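The deferred-result tests encode the shape of the Lookup protocol: the backend may return some keys as "deferred", and the client must re-issue the request for just those keys until none remain, with `test_get_multi_max_loops` pinning a safety cap on the number of round trips. A rough sketch of that loop (names here are illustrative, not the real client internals):

    def lookup_all(connection, project, key_pbs, max_loops=128):
        """Collect results for key_pbs, re-requesting any deferred keys."""
        results, missing = [], []
        for _ in range(max_loops):  # hard cap, mirroring the test's intent
            if not key_pbs:
                break
            found, missed, deferred = connection.lookup(project, key_pbs)
            results.extend(found)
            missing.extend(missed)
            key_pbs = deferred  # only the deferred keys go around again
        return results, missing

The two queued `_add_lookup_result(...)` calls in `test_get_multi_w_deferred_from_backend_but_not_passed` are exactly two iterations of such a loop: the first response defers `key2_pb`, the second resolves it.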
creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._add_lookup_result([entity_pb]) key = Key(KIND, ID, project=self.PROJECT) @@ -530,7 +530,7 @@ def _put_multi(*args, **kw): _called_with.append((args, kw)) creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.put_multi = _put_multi entity = object() @@ -541,7 +541,7 @@ def _put_multi(*args, **kw): def test_put_multi_no_entities(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) self.assertIsNone(client.put_multi([])) def test_put_multi_w_single_empty_entity(self): @@ -549,7 +549,7 @@ def test_put_multi_w_single_empty_entity(self): from google.cloud.datastore.entity import Entity creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) self.assertRaises(ValueError, client.put_multi, Entity()) def test_put_multi_no_batch_w_partial_key(self): @@ -560,7 +560,7 @@ def test_put_multi_no_batch_w_partial_key(self): key._id = None creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._commit.append([_KeyPB(key)]) result = client.put_multi([entity]) @@ -586,7 +586,7 @@ def test_put_multi_existing_batch_w_completed_key(self): from google.cloud.datastore.helpers import _property_tuples creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) entity = _Entity(foo=u'bar') key = entity.key = _Key(self.PROJECT) @@ -610,7 +610,7 @@ def _delete_multi(*args, **kw): _called_with.append((args, kw)) creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.delete_multi = _delete_multi key = object() @@ -621,7 +621,7 @@ def _delete_multi(*args, **kw): def test_delete_multi_no_keys(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) result = client.delete_multi([]) self.assertIsNone(result) self.assertEqual(len(client.connection._commit_cw), 0) @@ -630,7 +630,7 @@ def test_delete_multi_no_batch(self): key = _Key(self.PROJECT) creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) client.connection._commit.append([]) result = client.delete_multi([key]) @@ -646,7 +646,7 @@ def test_delete_multi_no_batch(self): def test_delete_multi_w_existing_batch(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) key = _Key(self.PROJECT) with _NoCommitBatch(client) as CURR_BATCH: @@ -659,7 +659,7 @@ def test_delete_multi_w_existing_batch(self): def test_delete_multi_w_existing_transaction(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) key = _Key(self.PROJECT) with _NoCommitTransaction(client) as CURR_XACT: @@ -677,7 +677,7 @@ def test_allocate_ids_w_partial_key(self): INCOMPLETE_KEY._id = None creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) result = client.allocate_ids(INCOMPLETE_KEY, NUM_IDS) @@ -686,7 +686,7 @@ def test_allocate_ids_w_partial_key(self): def test_allocate_ids_with_completed_key(self): creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) COMPLETE_KEY = _Key(self.PROJECT) self.assertRaises(ValueError, 
client.allocate_ids, COMPLETE_KEY, 2) @@ -696,7 +696,7 @@ def test_key_w_project(self): ID = 1234 creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) self.assertRaises(TypeError, client.key, KIND, ID, project=self.PROJECT) @@ -709,7 +709,7 @@ def test_key_wo_project(self): ID = 1234 creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) with _Monkey(MUT, Key=_Dummy): key = client.key(KIND, ID) @@ -731,7 +731,7 @@ def test_key_w_namespace(self): NAMESPACE = object() creds = object() - client = self._makeOne(namespace=NAMESPACE, credentials=creds) + client = self._make_one(namespace=NAMESPACE, credentials=creds) with _Monkey(MUT, Key=_Dummy): key = client.key(KIND, ID) @@ -753,7 +753,7 @@ def test_key_w_namespace_collision(self): NAMESPACE2 = object() creds = object() - client = self._makeOne(namespace=NAMESPACE1, credentials=creds) + client = self._make_one(namespace=NAMESPACE1, credentials=creds) with _Monkey(MUT, Key=_Dummy): key = client.key(KIND, ID, namespace=NAMESPACE2) @@ -770,7 +770,7 @@ def test_batch(self): from google.cloud._testing import _Monkey creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) with _Monkey(MUT, Batch=_Dummy): batch = client.batch() @@ -784,7 +784,7 @@ def test_transaction_defaults(self): from google.cloud._testing import _Monkey creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) with _Monkey(MUT, Transaction=_Dummy): xact = client.transaction() @@ -797,8 +797,8 @@ def test_query_w_client(self): KIND = 'KIND' creds = object() - client = self._makeOne(credentials=creds) - other = self._makeOne(credentials=object()) + client = self._make_one(credentials=creds) + other = self._make_one(credentials=object()) self.assertRaises(TypeError, client.query, kind=KIND, client=other) @@ -806,7 +806,7 @@ def test_query_w_project(self): KIND = 'KIND' creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) self.assertRaises(TypeError, client.query, kind=KIND, project=self.PROJECT) @@ -816,7 +816,7 @@ def test_query_w_defaults(self): from google.cloud._testing import _Monkey creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) with _Monkey(MUT, Query=_Dummy): query = client.query() @@ -842,7 +842,7 @@ def test_query_explicit(self): DISTINCT_ON = ['DISTINCT_ON'] creds = object() - client = self._makeOne(credentials=creds) + client = self._make_one(credentials=creds) with _Monkey(MUT, Query=_Dummy): query = client.query( @@ -877,7 +877,7 @@ def test_query_w_namespace(self): NAMESPACE = object() creds = object() - client = self._makeOne(namespace=NAMESPACE, credentials=creds) + client = self._make_one(namespace=NAMESPACE, credentials=creds) with _Monkey(MUT, Query=_Dummy): query = client.query(kind=KIND) @@ -900,7 +900,7 @@ def test_query_w_namespace_collision(self): NAMESPACE2 = object() creds = object() - client = self._makeOne(namespace=NAMESPACE1, credentials=creds) + client = self._make_one(namespace=NAMESPACE1, credentials=creds) with _Monkey(MUT, Query=_Dummy): query = client.query(kind=KIND, namespace=NAMESPACE2) diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/unit_tests/test_entity.py index 6dd91f460886..30d40f947cc6 100644 --- a/packages/google-cloud-datastore/unit_tests/test_entity.py +++ 
b/packages/google-cloud-datastore/unit_tests/test_entity.py @@ -26,7 +26,7 @@ def _get_target_class(): from google.cloud.datastore.entity import Entity return Entity - def _makeOne(self, key=None, exclude_from_indexes=()): + def _make_one(self, key=None, exclude_from_indexes=()): klass = self._get_target_class() return klass(key=key, exclude_from_indexes=exclude_from_indexes) @@ -40,7 +40,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): _EXCLUDE_FROM_INDEXES = ['foo', 'bar'] key = _Key() - entity = self._makeOne( + entity = self._make_one( key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES) self.assertEqual(sorted(entity.exclude_from_indexes), sorted(_EXCLUDE_FROM_INDEXES)) @@ -48,13 +48,13 @@ def test_ctor_explicit(self): def test_ctor_bad_exclude_from_indexes(self): BAD_EXCLUDE_FROM_INDEXES = object() key = _Key() - self.assertRaises(TypeError, self._makeOne, key=key, + self.assertRaises(TypeError, self._make_one, key=key, exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES) def test___eq_____ne___w_non_entity(self): from google.cloud.datastore.key import Key key = Key(_KIND, _ID, project=_PROJECT) - entity = self._makeOne(key=key) + entity = self._make_one(key=key) self.assertFalse(entity == object()) self.assertTrue(entity != object()) @@ -63,9 +63,9 @@ def test___eq_____ne___w_different_keys(self): _ID1 = 1234 _ID2 = 2345 key1 = Key(_KIND, _ID1, project=_PROJECT) - entity1 = self._makeOne(key=key1) + entity1 = self._make_one(key=key1) key2 = Key(_KIND, _ID2, project=_PROJECT) - entity2 = self._makeOne(key=key2) + entity2 = self._make_one(key=key2) self.assertFalse(entity1 == entity2) self.assertTrue(entity1 != entity2) @@ -77,12 +77,12 @@ def test___eq_____ne___w_same_keys(self): meaning = 9 key1 = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key1, exclude_from_indexes=(name,)) + entity1 = self._make_one(key=key1, exclude_from_indexes=(name,)) entity1[name] = value entity1._meanings[name] = (meaning, value) key2 = Key(_KIND, _ID, project=_PROJECT) - entity2 = self._makeOne(key=key2, exclude_from_indexes=(name,)) + entity2 = self._make_one(key=key2, exclude_from_indexes=(name,)) entity2[name] = value entity2._meanings[name] = (meaning, value) @@ -92,10 +92,10 @@ def test___eq_____ne___w_same_keys(self): def test___eq_____ne___w_same_keys_different_props(self): from google.cloud.datastore.key import Key key1 = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key1) + entity1 = self._make_one(key=key1) entity1['foo'] = 'Foo' key2 = Key(_KIND, _ID, project=_PROJECT) - entity2 = self._makeOne(key=key2) + entity2 = self._make_one(key=key2) entity1['bar'] = 'Bar' self.assertFalse(entity1 == entity2) self.assertTrue(entity1 != entity2) @@ -104,9 +104,9 @@ def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): from google.cloud.datastore.key import Key key1 = Key(_KIND, _ID, project=_PROJECT) key2 = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key1) + entity1 = self._make_one(key=key1) entity1['some_key'] = key1 - entity2 = self._makeOne(key=key1) + entity2 = self._make_one(key=key1) entity2['some_key'] = key2 self.assertTrue(entity1 == entity2) self.assertFalse(entity1 != entity2) @@ -117,9 +117,9 @@ def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): _ID2 = 2345 key1 = Key(_KIND, _ID1, project=_PROJECT) key2 = Key(_KIND, _ID2, project=_PROJECT) - entity1 = self._makeOne(key=key1) + entity1 = self._make_one(key=key1) entity1['some_key'] = key1 - entity2 = self._makeOne(key=key1) + entity2 = 
self._make_one(key=key1) entity2['some_key'] = key2 self.assertFalse(entity1 == entity2) self.assertTrue(entity1 != entity2) @@ -127,12 +127,12 @@ def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): from google.cloud.datastore.key import Key key = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key) - sub1 = self._makeOne() + entity1 = self._make_one(key=key) + sub1 = self._make_one() sub1.update({'foo': 'Foo'}) entity1['some_entity'] = sub1 - entity2 = self._makeOne(key=key) - sub2 = self._makeOne() + entity2 = self._make_one(key=key) + sub2 = self._make_one() sub2.update({'foo': 'Foo'}) entity2['some_entity'] = sub2 self.assertTrue(entity1 == entity2) @@ -141,12 +141,12 @@ def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self): from google.cloud.datastore.key import Key key = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key) - sub1 = self._makeOne() + entity1 = self._make_one(key=key) + sub1 = self._make_one() sub1.update({'foo': 'Foo'}) entity1['some_entity'] = sub1 - entity2 = self._makeOne(key=key) - sub2 = self._makeOne() + entity2 = self._make_one(key=key) + sub2 = self._make_one() sub2.update({'foo': 'Bar'}) entity2['some_entity'] = sub2 self.assertFalse(entity1 == entity2) @@ -159,10 +159,10 @@ def test__eq__same_value_different_exclude(self): value = 42 key = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity1 = self._make_one(key=key, exclude_from_indexes=(name,)) entity1[name] = value - entity2 = self._makeOne(key=key, exclude_from_indexes=()) + entity2 = self._make_one(key=key, exclude_from_indexes=()) entity2[name] = value self.assertFalse(entity1 == entity2) @@ -175,23 +175,23 @@ def test__eq__same_value_different_meanings(self): value = 42 meaning = 9 key = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity1 = self._make_one(key=key, exclude_from_indexes=(name,)) entity1[name] = value - entity2 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity2 = self._make_one(key=key, exclude_from_indexes=(name,)) entity2[name] = value entity2._meanings[name] = (meaning, value) self.assertFalse(entity1 == entity2) def test___repr___no_key_empty(self): - entity = self._makeOne() + entity = self._make_one() self.assertEqual(repr(entity), '<Entity {}>') def test___repr___w_key_non_empty(self): key = _Key() key._path = '/bar/baz' - entity = self._makeOne(key=key) + entity = self._make_one(key=key) entity['foo'] = 'Foo' self.assertEqual(repr(entity), "<Entity/bar/baz {'foo': 'Foo'}>") diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py index 15713a21b8e1..eea61f956524 100644 --- a/packages/google-cloud-datastore/unit_tests/test_helpers.py +++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -879,13 +879,13 @@ def _get_target_class(): from google.cloud.datastore.helpers import GeoPoint return GeoPoint - def _makeOne(self, *args, **kwargs): + def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def test_constructor(self): lat = 81.2 lng = 359.9999 - geo_pt = self._makeOne(lat, lng) + geo_pt = self._make_one(lat, lng) self.assertEqual(geo_pt.latitude, lat) self.assertEqual(geo_pt.longitude, lng) @@ -894,7 +894,7 @@ def test_to_protobuf(self): lat = 0.0001 lng = 20.03 - geo_pt = 
self._makeOne(lat, lng) + geo_pt = self._make_one(lat, lng) result = geo_pt.to_protobuf() geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) self.assertEqual(result, geo_pt_pb) @@ -902,26 +902,26 @@ def test_to_protobuf(self): def test___eq__(self): lat = 0.0001 lng = 20.03 - geo_pt1 = self._makeOne(lat, lng) - geo_pt2 = self._makeOne(lat, lng) + geo_pt1 = self._make_one(lat, lng) + geo_pt2 = self._make_one(lat, lng) self.assertEqual(geo_pt1, geo_pt2) def test___eq__type_differ(self): lat = 0.0001 lng = 20.03 - geo_pt1 = self._makeOne(lat, lng) + geo_pt1 = self._make_one(lat, lng) geo_pt2 = object() self.assertNotEqual(geo_pt1, geo_pt2) def test___ne__same_value(self): lat = 0.0001 lng = 20.03 - geo_pt1 = self._makeOne(lat, lng) - geo_pt2 = self._makeOne(lat, lng) + geo_pt1 = self._make_one(lat, lng) + geo_pt2 = self._make_one(lat, lng) comparison_val = (geo_pt1 != geo_pt2) self.assertFalse(comparison_val) def test___ne__(self): - geo_pt1 = self._makeOne(0.0, 1.0) - geo_pt2 = self._makeOne(2.0, 3.0) + geo_pt1 = self._make_one(0.0, 1.0) + geo_pt2 = self._make_one(2.0, 3.0) self.assertNotEqual(geo_pt1, geo_pt2) diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py index b8713663e92a..ca5530868893 100644 --- a/packages/google-cloud-datastore/unit_tests/test_key.py +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -24,11 +24,11 @@ def _get_target_class(): from google.cloud.datastore.key import Key return Key - def _makeOne(self, *args, **kwargs): + def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def test_ctor_empty(self): - self.assertRaises(ValueError, self._makeOne) + self.assertRaises(ValueError, self._make_one) def test_ctor_no_project(self): klass = self._get_target_class() @@ -36,7 +36,7 @@ def test_ctor_no_project(self): def test_ctor_w_explicit_project_empty_path(self): _PROJECT = 'PROJECT' - self.assertRaises(ValueError, self._makeOne, project=_PROJECT) + self.assertRaises(ValueError, self._make_one, project=_PROJECT) def test_ctor_parent(self): _PARENT_KIND = 'KIND1' @@ -49,10 +49,10 @@ def test_ctor_parent(self): {'kind': _PARENT_KIND, 'id': _PARENT_ID}, {'kind': _CHILD_KIND, 'id': _CHILD_ID}, ] - parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, + parent_key = self._make_one(_PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, namespace=_PARENT_NAMESPACE) - key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + key = self._make_one(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.project, parent_key.project) self.assertEqual(key.namespace, parent_key.namespace) self.assertEqual(key.kind, _CHILD_KIND) @@ -60,33 +60,33 @@ def test_ctor_parent(self): self.assertIs(key.parent, parent_key) def test_ctor_partial_parent(self): - parent_key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + parent_key = self._make_one('KIND', project=self._DEFAULT_PROJECT) with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, parent=parent_key) + self._make_one('KIND2', 1234, parent=parent_key) def test_ctor_parent_bad_type(self): with self.assertRaises(AttributeError): - self._makeOne('KIND2', 1234, parent=('KIND1', 1234), + self._make_one('KIND2', 1234, parent=('KIND1', 1234), project=self._DEFAULT_PROJECT) def test_ctor_parent_bad_namespace(self): - parent_key = self._makeOne('KIND', 1234, namespace='FOO', + parent_key = self._make_one('KIND', 1234, namespace='FOO', project=self._DEFAULT_PROJECT) with 
self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key, + self._make_one('KIND2', 1234, namespace='BAR', parent=parent_key, project=self._DEFAULT_PROJECT) def test_ctor_parent_bad_project(self): - parent_key = self._makeOne('KIND', 1234, project='FOO') + parent_key = self._make_one('KIND', 1234, project='FOO') with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, parent=parent_key, + self._make_one('KIND2', 1234, parent=parent_key, project='BAR') def test_ctor_parent_empty_path(self): - parent_key = self._makeOne('KIND', 1234, + parent_key = self._make_one('KIND', 1234, project=self._DEFAULT_PROJECT) with self.assertRaises(ValueError): - self._makeOne(parent=parent_key) + self._make_one(parent=parent_key) def test_ctor_explicit(self): _PROJECT = 'PROJECT-ALT' @@ -94,7 +94,7 @@ def test_ctor_explicit(self): _KIND = 'KIND' _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) self.assertEqual(key.project, _PROJECT) self.assertEqual(key.namespace, _NAMESPACE) @@ -102,15 +102,15 @@ def test_ctor_explicit(self): self.assertEqual(key.path, _PATH) def test_ctor_bad_kind(self): - self.assertRaises(ValueError, self._makeOne, object(), + self.assertRaises(ValueError, self._make_one, object(), project=self._DEFAULT_PROJECT) def test_ctor_bad_id_or_name(self): - self.assertRaises(ValueError, self._makeOne, 'KIND', object(), + self.assertRaises(ValueError, self._make_one, 'KIND', object(), project=self._DEFAULT_PROJECT) - self.assertRaises(ValueError, self._makeOne, 'KIND', None, + self.assertRaises(ValueError, self._make_one, 'KIND', None, project=self._DEFAULT_PROJECT) - self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None, + self.assertRaises(ValueError, self._make_one, 'KIND', 10, 'KIND2', None, project=self._DEFAULT_PROJECT) def test__clone(self): @@ -119,7 +119,7 @@ def test__clone(self): _KIND = 'KIND' _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) clone = key._clone() self.assertEqual(clone.project, _PROJECT) @@ -136,9 +136,9 @@ def test__clone_with_parent(self): _ID2 = 2345 _PATH = [{'kind': _KIND1, 'id': _ID1}, {'kind': _KIND2, 'id': _ID2}] - parent = self._makeOne(_KIND1, _ID1, namespace=_NAMESPACE, + parent = self._make_one(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT) - key = self._makeOne(_KIND2, _ID2, parent=parent) + key = self._make_one(_KIND2, _ID2, parent=parent) self.assertIs(key.parent, parent) clone = key._clone() self.assertIs(clone.parent, key.parent) @@ -150,15 +150,15 @@ def test___eq_____ne___w_non_key(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _NAME = 'one' - key = self._makeOne(_KIND, _NAME, project=_PROJECT) + key = self._make_one(_KIND, _NAME, project=_PROJECT) self.assertFalse(key == object()) self.assertTrue(key != object()) def test___eq_____ne___two_incomplete_keys_same_kind(self): _PROJECT = 'PROJECT' _KIND = 'KIND' - key1 = self._makeOne(_KIND, project=_PROJECT) - key2 = self._makeOne(_KIND, project=_PROJECT) + key1 = self._make_one(_KIND, project=_PROJECT) + key2 = self._make_one(_KIND, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -166,8 +166,8 @@ def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _ID = 1234 - key1 = self._makeOne(_KIND, 
project=_PROJECT) - key2 = self._makeOne(_KIND, _ID, project=_PROJECT) + key1 = self._make_one(_KIND, project=_PROJECT) + key2 = self._make_one(_KIND, _ID, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -175,8 +175,8 @@ def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _ID = 1234 - key1 = self._makeOne(_KIND, _ID, project=_PROJECT) - key2 = self._makeOne(_KIND, project=_PROJECT) + key1 = self._make_one(_KIND, _ID, project=_PROJECT) + key2 = self._make_one(_KIND, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -185,8 +185,8 @@ def test___eq_____ne___same_kind_different_ids(self): _KIND = 'KIND' _ID1 = 1234 _ID2 = 2345 - key1 = self._makeOne(_KIND, _ID1, project=_PROJECT) - key2 = self._makeOne(_KIND, _ID2, project=_PROJECT) + key1 = self._make_one(_KIND, _ID1, project=_PROJECT) + key2 = self._make_one(_KIND, _ID2, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -194,8 +194,8 @@ def test___eq_____ne___same_kind_and_id(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _ID = 1234 - key1 = self._makeOne(_KIND, _ID, project=_PROJECT) - key2 = self._makeOne(_KIND, _ID, project=_PROJECT) + key1 = self._make_one(_KIND, _ID, project=_PROJECT) + key2 = self._make_one(_KIND, _ID, project=_PROJECT) self.assertTrue(key1 == key2) self.assertFalse(key1 != key2) @@ -204,8 +204,8 @@ def test___eq_____ne___same_kind_and_id_different_project(self): _PROJECT2 = 'PROJECT2' _KIND = 'KIND' _ID = 1234 - key1 = self._makeOne(_KIND, _ID, project=_PROJECT1) - key2 = self._makeOne(_KIND, _ID, project=_PROJECT2) + key1 = self._make_one(_KIND, _ID, project=_PROJECT1) + key2 = self._make_one(_KIND, _ID, project=_PROJECT2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -215,9 +215,9 @@ def test___eq_____ne___same_kind_and_id_different_namespace(self): _NAMESPACE2 = 'NAMESPACE2' _KIND = 'KIND' _ID = 1234 - key1 = self._makeOne(_KIND, _ID, project=_PROJECT, + key1 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1) - key2 = self._makeOne(_KIND, _ID, project=_PROJECT, + key2 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -227,8 +227,8 @@ def test___eq_____ne___same_kind_different_names(self): _KIND = 'KIND' _NAME1 = 'one' _NAME2 = 'two' - key1 = self._makeOne(_KIND, _NAME1, project=_PROJECT) - key2 = self._makeOne(_KIND, _NAME2, project=_PROJECT) + key1 = self._make_one(_KIND, _NAME1, project=_PROJECT) + key2 = self._make_one(_KIND, _NAME2, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -236,8 +236,8 @@ def test___eq_____ne___same_kind_and_name(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _NAME = 'one' - key1 = self._makeOne(_KIND, _NAME, project=_PROJECT) - key2 = self._makeOne(_KIND, _NAME, project=_PROJECT) + key1 = self._make_one(_KIND, _NAME, project=_PROJECT) + key2 = self._make_one(_KIND, _NAME, project=_PROJECT) self.assertTrue(key1 == key2) self.assertFalse(key1 != key2) @@ -246,8 +246,8 @@ def test___eq_____ne___same_kind_and_name_different_project(self): _PROJECT2 = 'PROJECT2' _KIND = 'KIND' _NAME = 'one' - key1 = self._makeOne(_KIND, _NAME, project=_PROJECT1) - key2 = self._makeOne(_KIND, _NAME, project=_PROJECT2) + key1 = self._make_one(_KIND, _NAME, project=_PROJECT1) + key2 = self._make_one(_KIND, _NAME, project=_PROJECT2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -257,9 +257,9 @@ def 
test___eq_____ne___same_kind_and_name_different_namespace(self): _NAMESPACE2 = 'NAMESPACE2' _KIND = 'KIND' _NAME = 'one' - key1 = self._makeOne(_KIND, _NAME, project=_PROJECT, + key1 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1) - key2 = self._makeOne(_KIND, _NAME, project=_PROJECT, + key2 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -267,7 +267,7 @@ def test___eq_____ne___same_kind_and_name_different_namespace(self): def test___hash___incomplete(self): _PROJECT = 'PROJECT' _KIND = 'KIND' - key = self._makeOne(_KIND, project=_PROJECT) + key = self._make_one(_KIND, project=_PROJECT) self.assertNotEqual(hash(key), hash(_KIND) + hash(_PROJECT) + hash(None)) @@ -275,7 +275,7 @@ def test___hash___completed_w_id(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _ID = 1234 - key = self._makeOne(_KIND, _ID, project=_PROJECT) + key = self._make_one(_KIND, _ID, project=_PROJECT) self.assertNotEqual(hash(key), hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None)) @@ -284,13 +284,13 @@ def test___hash___completed_w_name(self): _PROJECT = 'PROJECT' _KIND = 'KIND' _NAME = 'NAME' - key = self._makeOne(_KIND, _NAME, project=_PROJECT) + key = self._make_one(_KIND, _NAME, project=_PROJECT) self.assertNotEqual(hash(key), hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None)) def test_completed_key_on_partial_w_id(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) _ID = 1234 new_key = key.completed_key(_ID) self.assertIsNot(key, new_key) @@ -298,7 +298,7 @@ def test_completed_key_on_partial_w_id(self): self.assertIsNone(new_key.name) def test_completed_key_on_partial_w_name(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) _NAME = 'NAME' new_key = key.completed_key(_NAME) self.assertIsNot(key, new_key) @@ -306,18 +306,18 @@ def test_completed_key_on_partial_w_name(self): self.assertEqual(new_key.name, _NAME) def test_completed_key_on_partial_w_invalid(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertRaises(ValueError, key.completed_key, object()) def test_completed_key_on_complete(self): - key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', 1234, project=self._DEFAULT_PROJECT) self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): from google.cloud.datastore._generated import entity_pb2 _KIND = 'KIND' - key = self._makeOne(_KIND, project=self._DEFAULT_PROJECT) + key = self._make_one(_KIND, project=self._DEFAULT_PROJECT) pb = key.to_protobuf() self.assertIsInstance(pb, entity_pb2.Key) @@ -336,13 +336,13 @@ def test_to_protobuf_defaults(self): def test_to_protobuf_w_explicit_project(self): _PROJECT = 'PROJECT-ALT' - key = self._makeOne('KIND', project=_PROJECT) + key = self._make_one('KIND', project=_PROJECT) pb = key.to_protobuf() self.assertEqual(pb.partition_id.project_id, _PROJECT) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - key = self._makeOne('KIND', namespace=_NAMESPACE, + key = self._make_one('KIND', namespace=_NAMESPACE, project=self._DEFAULT_PROJECT) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE) @@ -352,7 +352,7 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 
'NAME' - key = self._makeOne(_PARENT, _NAME, _CHILD, _ID, + key = self._make_one(_PARENT, _NAME, _CHILD, _ID, project=self._DEFAULT_PROJECT) pb = key.to_protobuf() elems = list(pb.path) @@ -363,7 +363,7 @@ def test_to_protobuf_w_explicit_path(self): self.assertEqual(elems[1].id, _ID) def test_to_protobuf_w_no_kind(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) # Force the 'kind' to be unset. Maybe `to_protobuf` should fail # on this? The backend certainly will. key._path[-1].pop('kind') @@ -372,51 +372,51 @@ def test_to_protobuf_w_no_kind(self): self.assertEqual(pb.path[0].kind, '') def test_is_partial_no_name_or_id(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertTrue(key.is_partial) def test_is_partial_w_id(self): _ID = 1234 - key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', _ID, project=self._DEFAULT_PROJECT) self.assertFalse(key.is_partial) def test_is_partial_w_name(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', _NAME, project=self._DEFAULT_PROJECT) self.assertFalse(key.is_partial) def test_id_or_name_no_name_or_id(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertIsNone(key.id_or_name) def test_id_or_name_no_name_or_id_child(self): - key = self._makeOne('KIND1', 1234, 'KIND2', + key = self._make_one('KIND1', 1234, 'KIND2', project=self._DEFAULT_PROJECT) self.assertIsNone(key.id_or_name) def test_id_or_name_w_id_only(self): _ID = 1234 - key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', _ID, project=self._DEFAULT_PROJECT) self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', _NAME, project=self._DEFAULT_PROJECT) self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertIsNone(key.parent) def test_parent_explicit_top_level(self): - key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT) + key = self._make_one('KIND', 1234, project=self._DEFAULT_PROJECT) self.assertIsNone(key.parent) def test_parent_explicit_nested(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + key = self._make_one(_PARENT_KIND, _PARENT_ID, 'KIND2', project=self._DEFAULT_PROJECT) self.assertEqual(key.parent.path, _PARENT_PATH) @@ -424,7 +424,7 @@ def test_parent_multiple_calls(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + key = self._make_one(_PARENT_KIND, _PARENT_ID, 'KIND2', project=self._DEFAULT_PROJECT) parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index f3fdc407f7d6..b01b060491ef 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -24,7 +24,7 @@ 
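The `parent`/`path` tests above fix the shape of ancestor keys: the flat constructor arguments alternate kind and id-or-name, a string fills a path element's 'name' slot while an integer fills 'id', and `key.parent` is recomputed from the path rather than stored. A usage sketch (offline; constructing a `Key` needs no connection):

    from google.cloud.datastore.key import Key

    key = Key('Parent', 'alice', 'Child', 42, project='my-project')
    assert key.path == [
        {'kind': 'Parent', 'name': 'alice'},  # str fills 'name'
        {'kind': 'Child', 'id': 42},          # int fills 'id'
    ]
    assert key.parent.path == [{'kind': 'Parent', 'name': 'alice'}]
    assert key.parent.parent is None  # a top-level key has no parent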
def _get_target_class(): from google.cloud.datastore.query import Query return Query - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def _makeClient(self, connection=None): @@ -34,7 +34,7 @@ def _makeClient(self, connection=None): def test_ctor_defaults(self): client = self._makeClient() - query = self._makeOne(client) + query = self._make_one(client) self.assertIs(query._client, client) self.assertEqual(query.project, client.project) self.assertIsNone(query.kind) @@ -56,7 +56,7 @@ def test_ctor_explicit(self): PROJECTION = ['foo', 'bar', 'baz'] ORDER = ['foo', 'bar'] DISTINCT_ON = ['foo'] - query = self._makeOne( + query = self._make_one( client, kind=_KIND, project=_PROJECT, @@ -79,26 +79,26 @@ def test_ctor_explicit(self): def test_ctor_bad_projection(self): BAD_PROJECTION = object() - self.assertRaises(TypeError, self._makeOne, self._makeClient(), + self.assertRaises(TypeError, self._make_one, self._makeClient(), projection=BAD_PROJECTION) def test_ctor_bad_order(self): BAD_ORDER = object() - self.assertRaises(TypeError, self._makeOne, self._makeClient(), + self.assertRaises(TypeError, self._make_one, self._makeClient(), order=BAD_ORDER) def test_ctor_bad_distinct_on(self): BAD_DISTINCT_ON = object() - self.assertRaises(TypeError, self._makeOne, self._makeClient(), + self.assertRaises(TypeError, self._make_one, self._makeClient(), distinct_on=BAD_DISTINCT_ON) def test_ctor_bad_filters(self): FILTERS_CANT_UNPACK = [('one', 'two')] - self.assertRaises(ValueError, self._makeOne, self._makeClient(), + self.assertRaises(ValueError, self._make_one, self._makeClient(), filters=FILTERS_CANT_UNPACK) def test_namespace_setter_w_non_string(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) def _assign(val): query.namespace = val @@ -107,12 +107,12 @@ def _assign(val): def test_namespace_setter(self): _NAMESPACE = 'OTHER_NAMESPACE' - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.namespace = _NAMESPACE self.assertEqual(query.namespace, _NAMESPACE) def test_kind_setter_w_non_string(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) def _assign(val): query.kind = val @@ -121,21 +121,21 @@ def _assign(val): def test_kind_setter_wo_existing(self): _KIND = 'KIND' - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.kind = _KIND self.assertEqual(query.kind, _KIND) def test_kind_setter_w_existing(self): _KIND_BEFORE = 'KIND_BEFORE' _KIND_AFTER = 'KIND_AFTER' - query = self._makeOne(self._makeClient(), kind=_KIND_BEFORE) + query = self._make_one(self._makeClient(), kind=_KIND_BEFORE) self.assertEqual(query.kind, _KIND_BEFORE) query.kind = _KIND_AFTER self.assertEqual(query.project, self._PROJECT) self.assertEqual(query.kind, _KIND_AFTER) def test_ancestor_setter_w_non_key(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) def _assign(val): query.ancestor = val @@ -147,7 +147,7 @@ def test_ancestor_setter_w_key(self): from google.cloud.datastore.key import Key _NAME = u'NAME' key = Key('KIND', 123, project=self._PROJECT) - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.add_filter('name', '=', _NAME) query.ancestor = key self.assertEqual(query.ancestor.path, key.path) @@ -155,22 +155,22 @@ def test_ancestor_setter_w_key(self): def test_ancestor_deleter_w_key(self): from 
google.cloud.datastore.key import Key key = Key('KIND', 123, project=self._PROJECT) - query = self._makeOne(client=self._makeClient(), ancestor=key) + query = self._make_one(client=self._makeClient(), ancestor=key) del query.ancestor self.assertIsNone(query.ancestor) def test_add_filter_setter_w_unknown_operator(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) self.assertRaises(ValueError, query.add_filter, 'firstname', '~~', 'John') def test_add_filter_w_known_operator(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.add_filter('firstname', '=', u'John') self.assertEqual(query.filters, [('firstname', '=', u'John')]) def test_add_filter_w_all_operators(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.add_filter('leq_prop', '<=', u'val1') query.add_filter('geq_prop', '>=', u'val2') query.add_filter('lt_prop', '<', u'val3') @@ -185,7 +185,7 @@ def test_add_filter_w_all_operators(self): def test_add_filter_w_known_operator_and_entity(self): from google.cloud.datastore.entity import Entity - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) other = Entity() other['firstname'] = u'John' other['lastname'] = u'Smith' @@ -193,14 +193,14 @@ def test_add_filter_w_known_operator_and_entity(self): self.assertEqual(query.filters, [('other', '=', other)]) def test_add_filter_w_whitespace_property_name(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) PROPERTY_NAME = ' property with lots of space ' query.add_filter(PROPERTY_NAME, '=', u'John') self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')]) def test_add_filter___key__valid_key(self): from google.cloud.datastore.key import Key - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) key = Key('Foo', project=self._PROJECT) query.add_filter('__key__', '=', key) self.assertEqual(query.filters, [('__key__', '=', key)]) @@ -208,40 +208,40 @@ def test_add_filter___key__valid_key(self): def test_filter___key__not_equal_operator(self): from google.cloud.datastore.key import Key key = Key('Foo', project=self._PROJECT) - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.add_filter('__key__', '<', key) self.assertEqual(query.filters, [('__key__', '<', key)]) def test_filter___key__invalid_value(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) self.assertRaises(ValueError, query.add_filter, '__key__', '=', None) def test_projection_setter_empty(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.projection = [] self.assertEqual(query.projection, []) def test_projection_setter_string(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.projection = 'field1' self.assertEqual(query.projection, ['field1']) def test_projection_setter_non_empty(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.projection = ['field1', 'field2'] self.assertEqual(query.projection, ['field1', 'field2']) def test_projection_setter_multiple_calls(self): _PROJECTION1 = ['field1', 'field2'] _PROJECTION2 = ['field3'] - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.projection = _PROJECTION1 self.assertEqual(query.projection, 
_PROJECTION1) query.projection = _PROJECTION2 self.assertEqual(query.projection, _PROJECTION2) def test_keys_only(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.keys_only() self.assertEqual(query.projection, ['__key__']) @@ -249,7 +249,7 @@ def test_key_filter_defaults(self): from google.cloud.datastore.key import Key client = self._makeClient() - query = self._makeOne(client) + query = self._make_one(client) self.assertEqual(query.filters, []) key = Key('Kind', 1234, project='project') query.key_filter(key) @@ -259,51 +259,51 @@ def test_key_filter_explicit(self): from google.cloud.datastore.key import Key client = self._makeClient() - query = self._makeOne(client) + query = self._make_one(client) self.assertEqual(query.filters, []) key = Key('Kind', 1234, project='project') query.key_filter(key, operator='>') self.assertEqual(query.filters, [('__key__', '>', key)]) def test_order_setter_empty(self): - query = self._makeOne(self._makeClient(), order=['foo', '-bar']) + query = self._make_one(self._makeClient(), order=['foo', '-bar']) query.order = [] self.assertEqual(query.order, []) def test_order_setter_string(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.order = 'field' self.assertEqual(query.order, ['field']) def test_order_setter_single_item_list_desc(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.order = ['-field'] self.assertEqual(query.order, ['-field']) def test_order_setter_multiple(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.order = ['foo', '-bar'] self.assertEqual(query.order, ['foo', '-bar']) def test_distinct_on_setter_empty(self): - query = self._makeOne(self._makeClient(), distinct_on=['foo', 'bar']) + query = self._make_one(self._makeClient(), distinct_on=['foo', 'bar']) query.distinct_on = [] self.assertEqual(query.distinct_on, []) def test_distinct_on_setter_string(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.distinct_on = 'field1' self.assertEqual(query.distinct_on, ['field1']) def test_distinct_on_setter_non_empty(self): - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.distinct_on = ['field1', 'field2'] self.assertEqual(query.distinct_on, ['field1', 'field2']) def test_distinct_on_multiple_calls(self): _DISTINCT_ON1 = ['field1', 'field2'] _DISTINCT_ON2 = ['field3'] - query = self._makeOne(self._makeClient()) + query = self._make_one(self._makeClient()) query.distinct_on = _DISTINCT_ON1 self.assertEqual(query.distinct_on, _DISTINCT_ON1) query.distinct_on = _DISTINCT_ON2 @@ -314,7 +314,7 @@ def test_fetch_defaults_w_client_attr(self): connection = _Connection() client = self._makeClient(connection) - query = self._makeOne(client) + query = self._make_one(client) iterator = query.fetch() self.assertIsInstance(iterator, Iterator) @@ -329,7 +329,7 @@ def test_fetch_w_explicit_client(self): connection = _Connection() client = self._makeClient(connection) other_client = self._makeClient(connection) - query = self._makeOne(client) + query = self._make_one(client) iterator = query.fetch(limit=7, offset=8, client=other_client) self.assertIsInstance(iterator, Iterator) self.assertIs(iterator._query, query) @@ -345,13 +345,13 @@ def _get_target_class(): from google.cloud.datastore.query import Iterator return Iterator - def _makeOne(self, *args, **kw): + def 
_make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor_defaults(self): query = object() client = object() - iterator = self._makeOne(query, client) + iterator = self._make_one(query, client) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) @@ -372,7 +372,7 @@ def test_constructor_explicit(self): offset = 9 start_cursor = b'8290\xff' end_cursor = b'so20rc\ta' - iterator = self._makeOne( + iterator = self._make_one( query, client, limit=limit, offset=offset, start_cursor=start_cursor, end_cursor=end_cursor) @@ -394,7 +394,7 @@ def test__build_protobuf_empty(self): client = _Client(None, None) query = Query(client) - iterator = self._makeOne(query, client) + iterator = self._make_one(query, client) pb = iterator._build_protobuf() expected_pb = query_pb2.Query() @@ -412,7 +412,7 @@ def test__build_protobuf_all_values(self): start_cursor = 'abcd' end_bytes = b'\xc3\x1c\xb3' end_cursor = 'wxyz' - iterator = self._makeOne( + iterator = self._make_one( query, client, limit=limit, offset=offset, start_cursor=start_cursor, end_cursor=end_cursor) self.assertEqual(iterator.max_results, limit) @@ -431,7 +431,7 @@ def test__build_protobuf_all_values(self): def test__process_query_results(self): from google.cloud.datastore._generated import query_pb2 - iterator = self._makeOne(None, None, + iterator = self._make_one(None, None, end_cursor='abcd') self.assertIsNotNone(iterator._end_cursor) @@ -452,7 +452,7 @@ def test__process_query_results(self): def test__process_query_results_done(self): from google.cloud.datastore._generated import query_pb2 - iterator = self._makeOne(None, None, + iterator = self._make_one(None, None, end_cursor='abcd') self.assertIsNotNone(iterator._end_cursor) @@ -470,7 +470,7 @@ def test__process_query_results_done(self): self.assertFalse(iterator._more_results) def test__process_query_results_bad_enum(self): - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) more_results_enum = 999 with self.assertRaises(ValueError): iterator._process_query_results( @@ -488,7 +488,7 @@ def test__next_page(self): project = 'prujekt' client = _Client(project, connection) query = Query(client) - iterator = self._makeOne(query, client) + iterator = self._make_one(query, client) page = iterator._next_page() self.assertIsInstance(page, Page) @@ -507,7 +507,7 @@ def test__next_page_no_more(self): connection = _Connection() client = _Client(None, connection) query = Query(client) - iterator = self._makeOne(query, client) + iterator = self._make_one(query, client) iterator._more_results = False page = iterator._next_page() diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index 2d12d7405eb6..46fbf21320e5 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -22,7 +22,7 @@ def _get_target_class(): from google.cloud.datastore.transaction import Transaction return Transaction - def _makeOne(self, client, **kw): + def _make_one(self, client, **kw): return self._get_target_class()(client, **kw) def test_ctor_defaults(self): @@ -31,7 +31,7 @@ def test_ctor_defaults(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) self.assertEqual(xact.project, _PROJECT) self.assertEqual(xact.connection, connection) self.assertIsNone(xact.id) @@ 
-45,8 +45,8 @@ def test_current(self): _PROJECT = 'PROJECT' connection = _Connection() client = _Client(_PROJECT, connection) - xact1 = self._makeOne(client) - xact2 = self._makeOne(client) + xact1 = self._make_one(client) + xact2 = self._make_one(client) self.assertIsNone(xact1.current()) self.assertIsNone(xact2.current()) with xact1: @@ -70,7 +70,7 @@ def test_begin(self): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact.begin() self.assertEqual(xact.id, 234) self.assertEqual(connection._begun, _PROJECT) @@ -79,7 +79,7 @@ def test_begin_tombstoned(self): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact.begin() self.assertEqual(xact.id, 234) self.assertEqual(connection._begun, _PROJECT) @@ -93,7 +93,7 @@ def test_begin_w_begin_transaction_failure(self): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) connection._side_effect = RuntimeError with self.assertRaises(RuntimeError): @@ -106,7 +106,7 @@ def test_rollback(self): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact.begin() xact.rollback() self.assertIsNone(xact.id) @@ -116,7 +116,7 @@ def test_commit_no_partial_keys(self): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact._commit_request = commit_request = object() xact.begin() xact.commit() @@ -131,7 +131,7 @@ def test_commit_w_partial_keys(self): connection = _Connection(234) connection._completed_keys = [_make_key(_KIND, _ID, _PROJECT)] client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact.begin() entity = _Entity() xact.put(entity) @@ -146,7 +146,7 @@ def test_context_manager_no_raise(self): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact._commit_request = commit_request = object() with xact: self.assertEqual(xact.id, 234) @@ -163,7 +163,7 @@ class Foo(Exception): _PROJECT = 'PROJECT' connection = _Connection(234) client = _Client(_PROJECT, connection) - xact = self._makeOne(client) + xact = self._make_one(client) xact._mutation = object() try: with xact: From 257fc28d9cae537af29e361f468aa8bf9000b4e8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:06:21 -0800 Subject: [PATCH 044/611] Changing all instances of _callFUT to _call_fut. 
Done via: $ git grep -l _callFUT | \ > xargs sed -i s/_callFUT/_call_fut/g --- .../unit_tests/test__http.py | 16 +- .../unit_tests/test_client.py | 10 +- .../unit_tests/test_helpers.py | 148 +++++++++--------- .../unit_tests/test_query.py | 22 +-- 4 files changed, 98 insertions(+), 98 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 64c6a6258122..ceefaa8ff828 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -110,7 +110,7 @@ def test__request_not_200(self): @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') class Test__grpc_catch_rendezvous(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud.datastore._http import _grpc_catch_rendezvous return _grpc_catch_rendezvous() @@ -123,7 +123,7 @@ def _fake_method(exc, result=None): def test_success(self): expected = object() - with self._callFUT(): + with self._call_fut(): result = self._fake_method(None, expected) self.assertIs(result, expected) @@ -137,7 +137,7 @@ def test_failure_aborted(self): exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) exc = GrpcRendezvous(exc_state, None, None, None) with self.assertRaises(Conflict): - with self._callFUT(): + with self._call_fut(): self._fake_method(exc) def test_failure_invalid_argument(self): @@ -152,7 +152,7 @@ def test_failure_invalid_argument(self): StatusCode.INVALID_ARGUMENT, details) exc = GrpcRendezvous(exc_state, None, None, None) with self.assertRaises(BadRequest): - with self._callFUT(): + with self._call_fut(): self._fake_method(exc) def test_failure_cancelled(self): @@ -163,13 +163,13 @@ def test_failure_cancelled(self): exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) exc = GrpcRendezvous(exc_state, None, None, None) with self.assertRaises(GrpcRendezvous): - with self._callFUT(): + with self._call_fut(): self._fake_method(exc) def test_commit_failure_non_grpc_err(self): exc = RuntimeError('Not a gRPC error') with self.assertRaises(RuntimeError): - with self._callFUT(): + with self._call_fut(): self._fake_method(exc) @@ -1093,7 +1093,7 @@ def test_allocate_ids_non_empty(self): class Test__parse_commit_response(unittest.TestCase): - def _callFUT(self, commit_response_pb): + def _call_fut(self, commit_response_pb): from google.cloud.datastore._http import _parse_commit_response return _parse_commit_response(commit_response_pb) @@ -1126,7 +1126,7 @@ def test_it(self): ], index_updates=index_updates, ) - result = self._callFUT(response) + result = self._call_fut(response) self.assertEqual(result, (index_updates, keys)) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 6be62a95d20b..fb356b8ddc10 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -33,7 +33,7 @@ def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): class Test__get_gcd_project(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud.datastore.client import _get_gcd_project return _get_gcd_project() @@ -43,7 +43,7 @@ def test_no_value(self): environ = {} with _Monkey(os, getenv=environ.get): - project = self._callFUT() + project = self._call_fut() self.assertIsNone(project) def test_value_set(self): @@ -54,13 +54,13 @@ def test_value_set(self): MOCK_PROJECT = object() environ = 
{GCD_DATASET: MOCK_PROJECT} with _Monkey(os, getenv=environ.get): - project = self._callFUT() + project = self._call_fut() self.assertEqual(project, MOCK_PROJECT) class Test__determine_default_project(unittest.TestCase): - def _callFUT(self, project=None): + def _call_fut(self, project=None): from google.cloud.datastore.client import ( _determine_default_project) return _determine_default_project(project=project) @@ -86,7 +86,7 @@ def fallback_mock(project=None): } with _Monkey(client, **patched_methods): - returned_project = self._callFUT(project_called) + returned_project = self._call_fut(project_called) return returned_project, _callers diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py index eea61f956524..f3fa3391bbb7 100644 --- a/packages/google-cloud-datastore/unit_tests/test_helpers.py +++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -17,7 +17,7 @@ class Test__new_value_pb(unittest.TestCase): - def _callFUT(self, entity_pb, name): + def _call_fut(self, entity_pb, name): from google.cloud.datastore.helpers import _new_value_pb return _new_value_pb(entity_pb, name) @@ -26,7 +26,7 @@ def test_it(self): entity_pb = entity_pb2.Entity() name = 'foo' - result = self._callFUT(entity_pb, name) + result = self._call_fut(entity_pb, name) self.assertIsInstance(result, entity_pb2.Value) self.assertEqual(len(entity_pb.properties), 1) @@ -35,7 +35,7 @@ def test_it(self): class Test__property_tuples(unittest.TestCase): - def _callFUT(self, entity_pb): + def _call_fut(self, entity_pb): from google.cloud.datastore.helpers import _property_tuples return _property_tuples(entity_pb) @@ -50,7 +50,7 @@ def test_it(self): val_pb1 = _new_value_pb(entity_pb, name1) val_pb2 = _new_value_pb(entity_pb, name2) - result = self._callFUT(entity_pb) + result = self._call_fut(entity_pb) self.assertIsInstance(result, types.GeneratorType) self.assertEqual(sorted(result), sorted([(name1, val_pb1), (name2, val_pb2)])) @@ -58,7 +58,7 @@ def test_it(self): class Test_entity_from_protobuf(unittest.TestCase): - def _callFUT(self, val): + def _call_fut(self, val): from google.cloud.datastore.helpers import entity_from_protobuf return entity_from_protobuf(val) @@ -93,7 +93,7 @@ def test_it(self): indexed_array_val_pb = array_pb2.add() indexed_array_val_pb.integer_value = 12 - entity = self._callFUT(entity_pb) + entity = self._call_fut(entity_pb) self.assertEqual(entity.kind, _KIND) self.assertEqual(entity.exclude_from_indexes, frozenset(['bar', 'baz'])) @@ -130,13 +130,13 @@ def test_mismatched_value_indexed(self): unindexed_value_pb2.integer_value = 11 with self.assertRaises(ValueError): - self._callFUT(entity_pb) + self._call_fut(entity_pb) def test_entity_no_key(self): from google.cloud.datastore._generated import entity_pb2 entity_pb = entity_pb2.Entity() - entity = self._callFUT(entity_pb) + entity = self._call_fut(entity_pb) self.assertIsNone(entity.key) self.assertEqual(dict(entity), {}) @@ -151,7 +151,7 @@ def test_entity_with_meaning(self): value_pb.meaning = meaning = 9 value_pb.string_value = val = u'something' - entity = self._callFUT(entity_pb) + entity = self._call_fut(entity_pb) self.assertIsNone(entity.key) self.assertEqual(dict(entity), {name: val}) self.assertEqual(entity._meanings, {name: (meaning, val)}) @@ -178,7 +178,7 @@ def test_nested_entity_no_key(self): outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME) outside_val_pb.entity_value.CopyFrom(entity_inside) - entity = self._callFUT(entity_pb) + 
entity = self._call_fut(entity_pb) self.assertEqual(entity.key.project, PROJECT) self.assertEqual(entity.key.flat_path, (KIND,)) self.assertEqual(len(entity), 1) @@ -191,7 +191,7 @@ def test_nested_entity_no_key(self): class Test_entity_to_protobuf(unittest.TestCase): - def _callFUT(self, entity): + def _call_fut(self, entity): from google.cloud.datastore.helpers import entity_to_protobuf return entity_to_protobuf(entity) @@ -218,7 +218,7 @@ def test_empty(self): from google.cloud.datastore.entity import Entity entity = Entity() - entity_pb = self._callFUT(entity) + entity_pb = self._call_fut(entity) self._compareEntityProto(entity_pb, entity_pb2.Entity()) def test_key_only(self): @@ -230,7 +230,7 @@ def test_key_only(self): project = 'PROJECT' key = Key(kind, name, project=project) entity = Entity(key=key) - entity_pb = self._callFUT(entity) + entity_pb = self._call_fut(entity) expected_pb = entity_pb2.Entity() expected_pb.key.partition_id.project_id = project @@ -250,7 +250,7 @@ def test_simple_fields(self): entity[name1] = value1 = 42 name2 = 'bar' entity[name2] = value2 = u'some-string' - entity_pb = self._callFUT(entity) + entity_pb = self._call_fut(entity) expected_pb = entity_pb2.Entity() val_pb1 = _new_value_pb(expected_pb, name1) @@ -266,7 +266,7 @@ def test_with_empty_list(self): entity = Entity() entity['foo'] = [] - entity_pb = self._callFUT(entity) + entity_pb = self._call_fut(entity) self._compareEntityProto(entity_pb, entity_pb2.Entity()) @@ -317,7 +317,7 @@ def test_inverts_to_protobuf(self): # Convert to the user-space Entity. entity = entity_from_protobuf(original_pb) # Convert the user-space Entity back to a protobuf. - new_pb = self._callFUT(entity) + new_pb = self._call_fut(entity) # NOTE: entity_to_protobuf() strips the project so we "cheat". new_pb.key.partition_id.project_id = project @@ -332,7 +332,7 @@ def test_meaning_with_change(self): name = 'foo' entity[name] = value = 42 entity._meanings[name] = (9, 1337) - entity_pb = self._callFUT(entity) + entity_pb = self._call_fut(entity) expected_pb = entity_pb2.Entity() value_pb = _new_value_pb(expected_pb, name) @@ -351,7 +351,7 @@ def test_variable_meanings(self): entity[name] = values = [1, 20, 300] meaning = 9 entity._meanings[name] = ([None, meaning, None], values) - entity_pb = self._callFUT(entity) + entity_pb = self._call_fut(entity) # Construct the expected protobuf. 
expected_pb = entity_pb2.Entity() @@ -370,7 +370,7 @@ def test_variable_meanings(self): class Test_key_from_protobuf(unittest.TestCase): - def _callFUT(self, val): + def _call_fut(self, val): from google.cloud.datastore.helpers import key_from_protobuf return key_from_protobuf(val) @@ -394,7 +394,7 @@ def _makePB(self, project=None, namespace=None, path=()): def test_wo_namespace_in_pb(self): _PROJECT = 'PROJECT' pb = self._makePB(path=[{'kind': 'KIND'}], project=_PROJECT) - key = self._callFUT(pb) + key = self._call_fut(pb) self.assertEqual(key.project, _PROJECT) self.assertIsNone(key.namespace) @@ -403,7 +403,7 @@ def test_w_namespace_in_pb(self): _NAMESPACE = 'NAMESPACE' pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE, project=_PROJECT) - key = self._callFUT(pb) + key = self._call_fut(pb) self.assertEqual(key.project, _PROJECT) self.assertEqual(key.namespace, _NAMESPACE) @@ -414,17 +414,17 @@ def test_w_nested_path_in_pb(self): {'kind': 'GRANDCHILD', 'id': 5678}, ] pb = self._makePB(path=_PATH, project='PROJECT') - key = self._callFUT(pb) + key = self._call_fut(pb) self.assertEqual(key.path, _PATH) def test_w_nothing_in_pb(self): pb = self._makePB() - self.assertRaises(ValueError, self._callFUT, pb) + self.assertRaises(ValueError, self._call_fut, pb) class Test__pb_attr_value(unittest.TestCase): - def _callFUT(self, val): + def _call_fut(self, val): from google.cloud.datastore.helpers import _pb_attr_value return _pb_attr_value(val) @@ -437,7 +437,7 @@ def test_datetime_naive(self): micros = 4375 naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros) # No zone. utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - name, value = self._callFUT(naive) + name, value = self._call_fut(naive) self.assertEqual(name, 'timestamp_value') self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) self.assertEqual(value.nanos, 1000 * micros) @@ -449,7 +449,7 @@ def test_datetime_w_zone(self): micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - name, value = self._callFUT(utc) + name, value = self._call_fut(utc) self.assertEqual(name, 'timestamp_value') self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) self.assertEqual(value.nanos, 1000 * micros) @@ -458,34 +458,34 @@ def test_key(self): from google.cloud.datastore.key import Key key = Key('PATH', 1234, project='PROJECT') - name, value = self._callFUT(key) + name, value = self._call_fut(key) self.assertEqual(name, 'key_value') self.assertEqual(value, key.to_protobuf()) def test_bool(self): - name, value = self._callFUT(False) + name, value = self._call_fut(False) self.assertEqual(name, 'boolean_value') self.assertEqual(value, False) def test_float(self): - name, value = self._callFUT(3.1415926) + name, value = self._call_fut(3.1415926) self.assertEqual(name, 'double_value') self.assertEqual(value, 3.1415926) def test_int(self): - name, value = self._callFUT(42) + name, value = self._call_fut(42) self.assertEqual(name, 'integer_value') self.assertEqual(value, 42) def test_long(self): must_be_long = (1 << 63) - 1 - name, value = self._callFUT(must_be_long) + name, value = self._call_fut(must_be_long) self.assertEqual(name, 'integer_value') self.assertEqual(value, must_be_long) def test_native_str(self): import six - name, value = self._callFUT('str') + name, value = self._call_fut('str') if six.PY2: self.assertEqual(name, 'blob_value') else: # pragma: NO COVER Python 3 @@ -493,25 +493,25 @@ def test_native_str(self): self.assertEqual(value, 'str') def test_bytes(self): - 
name, value = self._callFUT(b'bytes') + name, value = self._call_fut(b'bytes') self.assertEqual(name, 'blob_value') self.assertEqual(value, b'bytes') def test_unicode(self): - name, value = self._callFUT(u'str') + name, value = self._call_fut(u'str') self.assertEqual(name, 'string_value') self.assertEqual(value, u'str') def test_entity(self): from google.cloud.datastore.entity import Entity entity = Entity() - name, value = self._callFUT(entity) + name, value = self._call_fut(entity) self.assertEqual(name, 'entity_value') self.assertIs(value, entity) def test_array(self): values = ['a', 0, 3.14] - name, value = self._callFUT(values) + name, value = self._call_fut(values) self.assertEqual(name, 'array_value') self.assertIs(value, values) @@ -523,24 +523,24 @@ def test_geo_point(self): lng = 99.0007 geo_pt = GeoPoint(latitude=lat, longitude=lng) geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - name, value = self._callFUT(geo_pt) + name, value = self._call_fut(geo_pt) self.assertEqual(name, 'geo_point_value') self.assertEqual(value, geo_pt_pb) def test_null(self): from google.protobuf import struct_pb2 - name, value = self._callFUT(None) + name, value = self._call_fut(None) self.assertEqual(name, 'null_value') self.assertEqual(value, struct_pb2.NULL_VALUE) def test_object(self): - self.assertRaises(ValueError, self._callFUT, object()) + self.assertRaises(ValueError, self._call_fut, object()) class Test__get_value_from_value_pb(unittest.TestCase): - def _callFUT(self, pb): + def _call_fut(self, pb): from google.cloud.datastore.helpers import _get_value_from_value_pb return _get_value_from_value_pb(pb) @@ -563,7 +563,7 @@ def test_datetime(self): pb = entity_pb2.Value() pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) pb.timestamp_value.nanos = 1000 * micros - self.assertEqual(self._callFUT(pb), utc) + self.assertEqual(self._call_fut(pb), utc) def test_key(self): from google.cloud.datastore._generated import entity_pb2 @@ -572,28 +572,28 @@ def test_key(self): pb = entity_pb2.Value() expected = Key('KIND', 1234, project='PROJECT').to_protobuf() pb.key_value.CopyFrom(expected) - found = self._callFUT(pb) + found = self._call_fut(pb) self.assertEqual(found.to_protobuf(), expected) def test_bool(self): pb = self._makePB('boolean_value', False) - self.assertEqual(self._callFUT(pb), False) + self.assertEqual(self._call_fut(pb), False) def test_float(self): pb = self._makePB('double_value', 3.1415926) - self.assertEqual(self._callFUT(pb), 3.1415926) + self.assertEqual(self._call_fut(pb), 3.1415926) def test_int(self): pb = self._makePB('integer_value', 42) - self.assertEqual(self._callFUT(pb), 42) + self.assertEqual(self._call_fut(pb), 42) def test_bytes(self): pb = self._makePB('blob_value', b'str') - self.assertEqual(self._callFUT(pb), b'str') + self.assertEqual(self._call_fut(pb), b'str') def test_unicode(self): pb = self._makePB('string_value', u'str') - self.assertEqual(self._callFUT(pb), u'str') + self.assertEqual(self._call_fut(pb), u'str') def test_entity(self): from google.cloud.datastore._generated import entity_pb2 @@ -607,7 +607,7 @@ def test_entity(self): value_pb = _new_value_pb(entity_pb, 'foo') value_pb.string_value = 'Foo' - entity = self._callFUT(pb) + entity = self._call_fut(pb) self.assertIsInstance(entity, Entity) self.assertEqual(entity['foo'], 'Foo') @@ -620,7 +620,7 @@ def test_array(self): item_pb.string_value = 'Foo' item_pb = array_pb.add() item_pb.string_value = 'Bar' - items = self._callFUT(pb) + items = self._call_fut(pb) self.assertEqual(items, 
['Foo', 'Bar']) def test_geo_point(self): @@ -632,7 +632,7 @@ def test_geo_point(self): lng = 13.37 geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) pb = entity_pb2.Value(geo_point_value=geo_pt_pb) - result = self._callFUT(pb) + result = self._call_fut(pb) self.assertIsInstance(result, GeoPoint) self.assertEqual(result.latitude, lat) self.assertEqual(result.longitude, lng) @@ -642,7 +642,7 @@ def test_null(self): from google.cloud.datastore._generated import entity_pb2 pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) - result = self._callFUT(pb) + result = self._call_fut(pb) self.assertIsNone(result) def test_unknown(self): @@ -650,12 +650,12 @@ def test_unknown(self): pb = entity_pb2.Value() with self.assertRaises(ValueError): - self._callFUT(pb) + self._call_fut(pb) class Test_set_protobuf_value(unittest.TestCase): - def _callFUT(self, value_pb, val): + def _call_fut(self, value_pb, val): from google.cloud.datastore.helpers import _set_protobuf_value return _set_protobuf_value(value_pb, val) @@ -672,7 +672,7 @@ def test_datetime(self): pb = self._makePB() micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - self._callFUT(pb, utc) + self._call_fut(pb, utc) value = pb.timestamp_value self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) self.assertEqual(value.nanos, 1000 * micros) @@ -682,44 +682,44 @@ def test_key(self): pb = self._makePB() key = Key('KIND', 1234, project='PROJECT') - self._callFUT(pb, key) + self._call_fut(pb, key) value = pb.key_value self.assertEqual(value, key.to_protobuf()) def test_none(self): pb = self._makePB() - self._callFUT(pb, None) + self._call_fut(pb, None) self.assertEqual(pb.WhichOneof('value_type'), 'null_value') def test_bool(self): pb = self._makePB() - self._callFUT(pb, False) + self._call_fut(pb, False) value = pb.boolean_value self.assertEqual(value, False) def test_float(self): pb = self._makePB() - self._callFUT(pb, 3.1415926) + self._call_fut(pb, 3.1415926) value = pb.double_value self.assertEqual(value, 3.1415926) def test_int(self): pb = self._makePB() - self._callFUT(pb, 42) + self._call_fut(pb, 42) value = pb.integer_value self.assertEqual(value, 42) def test_long(self): pb = self._makePB() must_be_long = (1 << 63) - 1 - self._callFUT(pb, must_be_long) + self._call_fut(pb, must_be_long) value = pb.integer_value self.assertEqual(value, must_be_long) def test_native_str(self): import six pb = self._makePB() - self._callFUT(pb, 'str') + self._call_fut(pb, 'str') if six.PY2: value = pb.blob_value else: # pragma: NO COVER Python 3 @@ -728,13 +728,13 @@ def test_native_str(self): def test_bytes(self): pb = self._makePB() - self._callFUT(pb, b'str') + self._call_fut(pb, b'str') value = pb.blob_value self.assertEqual(value, b'str') def test_unicode(self): pb = self._makePB() - self._callFUT(pb, u'str') + self._call_fut(pb, u'str') value = pb.string_value self.assertEqual(value, u'str') @@ -744,7 +744,7 @@ def test_entity_empty_wo_key(self): pb = self._makePB() entity = Entity() - self._callFUT(pb, entity) + self._call_fut(pb, entity) value = pb.entity_value self.assertEqual(value.key.SerializeToString(), b'') self.assertEqual(len(list(_property_tuples(value))), 0) @@ -760,7 +760,7 @@ def test_entity_w_key(self): key = Key('KIND', 123, project='PROJECT') entity = Entity(key=key) entity[name] = value - self._callFUT(pb, entity) + self._call_fut(pb, entity) entity_pb = pb.entity_value self.assertEqual(entity_pb.key, key.to_protobuf()) @@ -772,7 +772,7 @@ def test_entity_w_key(self): def 
test_array(self): pb = self._makePB() values = [u'a', 0, 3.14] - self._callFUT(pb, values) + self._call_fut(pb, values) marshalled = pb.array_value.values self.assertEqual(len(marshalled), len(values)) self.assertEqual(marshalled[0].string_value, values[0]) @@ -788,13 +788,13 @@ def test_geo_point(self): lng = 3.337 geo_pt = GeoPoint(latitude=lat, longitude=lng) geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - self._callFUT(pb, geo_pt) + self._call_fut(pb, geo_pt) self.assertEqual(pb.geo_point_value, geo_pt_pb) class Test__get_meaning(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud.datastore.helpers import _get_meaning return _get_meaning(*args, **kwargs) @@ -802,7 +802,7 @@ def test_no_meaning(self): from google.cloud.datastore._generated import entity_pb2 value_pb = entity_pb2.Value() - result = self._callFUT(value_pb) + result = self._call_fut(value_pb) self.assertIsNone(result) def test_single(self): @@ -811,7 +811,7 @@ def test_single(self): value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 value_pb.string_value = u'hi' - result = self._callFUT(value_pb) + result = self._call_fut(value_pb) self.assertEqual(meaning, result) def test_empty_array_value(self): @@ -821,7 +821,7 @@ def test_empty_array_value(self): value_pb.array_value.values.add() value_pb.array_value.values.pop() - result = self._callFUT(value_pb, is_list=True) + result = self._call_fut(value_pb, is_list=True) self.assertEqual(None, result) def test_array_value(self): @@ -836,7 +836,7 @@ def test_array_value(self): sub_value_pb1.string_value = u'hi' sub_value_pb2.string_value = u'bye' - result = self._callFUT(value_pb, is_list=True) + result = self._call_fut(value_pb, is_list=True) self.assertEqual(meaning, result) def test_array_value_multiple_meanings(self): @@ -853,7 +853,7 @@ def test_array_value_multiple_meanings(self): sub_value_pb1.string_value = u'hi' sub_value_pb2.string_value = u'bye' - result = self._callFUT(value_pb, is_list=True) + result = self._call_fut(value_pb, is_list=True) self.assertEqual(result, [meaning1, meaning2]) def test_array_value_meaning_partially_unset(self): @@ -868,7 +868,7 @@ def test_array_value_meaning_partially_unset(self): sub_value_pb1.string_value = u'hi' sub_value_pb2.string_value = u'bye' - result = self._callFUT(value_pb, is_list=True) + result = self._call_fut(value_pb, is_list=True) self.assertEqual(result, [meaning1, None]) diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index b01b060491ef..d0b1b5e02dad 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -517,7 +517,7 @@ def test__next_page_no_more(self): class Test__item_to_entity(unittest.TestCase): - def _callFUT(self, iterator, entity_pb): + def _call_fut(self, iterator, entity_pb): from google.cloud.datastore.query import _item_to_entity return _item_to_entity(iterator, entity_pb) @@ -534,21 +534,21 @@ def mocked(entity_pb): entity_pb = object() with _Monkey(helpers, entity_from_protobuf=mocked): - self.assertIs(result, self._callFUT(None, entity_pb)) + self.assertIs(result, self._call_fut(None, entity_pb)) self.assertEqual(entities, [entity_pb]) class Test__pb_from_query(unittest.TestCase): - def _callFUT(self, query): + def _call_fut(self, query): from google.cloud.datastore.query import _pb_from_query return _pb_from_query(query) def test_empty(self): from 
google.cloud.datastore._generated import query_pb2 - pb = self._callFUT(_Query()) + pb = self._call_fut(_Query()) self.assertEqual(list(pb.projection), []) self.assertEqual(list(pb.kind), []) self.assertEqual(list(pb.order), []) @@ -564,12 +564,12 @@ def test_empty(self): self.assertEqual(pb.offset, 0) def test_projection(self): - pb = self._callFUT(_Query(projection=['a', 'b', 'c'])) + pb = self._call_fut(_Query(projection=['a', 'b', 'c'])) self.assertEqual([item.property.name for item in pb.projection], ['a', 'b', 'c']) def test_kind(self): - pb = self._callFUT(_Query(kind='KIND')) + pb = self._call_fut(_Query(kind='KIND')) self.assertEqual([item.name for item in pb.kind], ['KIND']) def test_ancestor(self): @@ -577,7 +577,7 @@ def test_ancestor(self): from google.cloud.datastore._generated import query_pb2 ancestor = Key('Ancestor', 123, project='PROJECT') - pb = self._callFUT(_Query(ancestor=ancestor)) + pb = self._call_fut(_Query(ancestor=ancestor)) cfilter = pb.filter.composite_filter self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) self.assertEqual(len(cfilter.filters), 1) @@ -593,7 +593,7 @@ def test_filter(self): query.OPERATORS = { '=': query_pb2.PropertyFilter.EQUAL, } - pb = self._callFUT(query) + pb = self._call_fut(query) cfilter = pb.filter.composite_filter self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) self.assertEqual(len(cfilter.filters), 1) @@ -610,7 +610,7 @@ def test_filter_key(self): query.OPERATORS = { '=': query_pb2.PropertyFilter.EQUAL, } - pb = self._callFUT(query) + pb = self._call_fut(query) cfilter = pb.filter.composite_filter self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) self.assertEqual(len(cfilter.filters), 1) @@ -622,7 +622,7 @@ def test_filter_key(self): def test_order(self): from google.cloud.datastore._generated import query_pb2 - pb = self._callFUT(_Query(order=['a', '-b', 'c'])) + pb = self._call_fut(_Query(order=['a', '-b', 'c'])) self.assertEqual([item.property.name for item in pb.order], ['a', 'b', 'c']) self.assertEqual([item.direction for item in pb.order], @@ -631,7 +631,7 @@ def test_order(self): query_pb2.PropertyOrder.ASCENDING]) def test_distinct_on(self): - pb = self._callFUT(_Query(distinct_on=['a', 'b', 'c'])) + pb = self._call_fut(_Query(distinct_on=['a', 'b', 'c'])) self.assertEqual([item.name for item in pb.distinct_on], ['a', 'b', 'c']) From fadbee9c33e4d52179775415f68c1723191b0603 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 21:02:17 -0800 Subject: [PATCH 045/611] Manually fixing up bad indents / long lines after renames. 
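The sed-style rename changes identifier lengths, so continuation lines that had been aligned with the old opening parenthesis end up a column off, and sed cannot re-align them. A minimal sketch of the breakage this pass cleans up (the helper and call below are invented stand-ins, not code from this diff):

    def _call_fut(stub, connection=None, mock_args=None):
        # Stand-in for a renamed test helper.
        return stub, connection, mock_args

    # After s/_callFUT/_call_fut/ the call grew by one character, so a
    # previously paren-aligned continuation line sits one column short:
    #     datastore_api = _callFUT(stub, connection=conn,
    #                              mock_args=mock_args)
    # Re-aligned by hand, as the hunks below do:
    datastore_api = _call_fut('stub', connection=object(),
                              mock_args=[])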
--- .../unit_tests/test__http.py | 6 +-- .../unit_tests/test_client.py | 14 +++---- .../unit_tests/test_key.py | 42 +++++++++---------- .../unit_tests/test_query.py | 4 +- 4 files changed, 33 insertions(+), 33 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index ceefaa8ff828..e96eed96a032 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -213,7 +213,7 @@ def test_constructor(self): stub = _GRPCStub() mock_args = [] datastore_api = self._make_one(stub, connection=conn, - mock_args=mock_args) + mock_args=mock_args) self.assertIs(datastore_api._stub, stub) self.assertEqual(mock_args, [( @@ -233,8 +233,8 @@ def test_constructor_insecure(self): stub = _GRPCStub() mock_args = [] datastore_api = self._make_one(stub, connection=conn, - secure=False, - mock_args=mock_args) + secure=False, + mock_args=mock_args) self.assertIs(datastore_api._stub, stub) self.assertEqual(mock_args, [( diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index fb356b8ddc10..2484d93796bc 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -134,11 +134,11 @@ def _get_target_class(): return Client def _make_one(self, project=PROJECT, namespace=None, - credentials=None, http=None): + credentials=None, http=None): return self._get_target_class()(project=project, - namespace=namespace, - credentials=credentials, - http=http) + namespace=namespace, + credentials=credentials, + http=http) def test_ctor_w_project_no_environ(self): from google.cloud._testing import _Monkey @@ -183,9 +183,9 @@ def test_ctor_w_explicit_inputs(self): creds = object() http = object() client = self._make_one(project=OTHER, - namespace=NAMESPACE, - credentials=creds, - http=http) + namespace=NAMESPACE, + credentials=creds, + http=http) self.assertEqual(client.project, OTHER) self.assertEqual(client.namespace, NAMESPACE) self.assertIsInstance(client.connection, _MockConnection) diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py index ca5530868893..b2227d297b31 100644 --- a/packages/google-cloud-datastore/unit_tests/test_key.py +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -50,8 +50,8 @@ def test_ctor_parent(self): {'kind': _CHILD_KIND, 'id': _CHILD_ID}, ] parent_key = self._make_one(_PARENT_KIND, _PARENT_ID, - project=_PARENT_PROJECT, - namespace=_PARENT_NAMESPACE) + project=_PARENT_PROJECT, + namespace=_PARENT_NAMESPACE) key = self._make_one(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.project, parent_key.project) self.assertEqual(key.namespace, parent_key.namespace) @@ -67,24 +67,24 @@ def test_ctor_partial_parent(self): def test_ctor_parent_bad_type(self): with self.assertRaises(AttributeError): self._make_one('KIND2', 1234, parent=('KIND1', 1234), - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) def test_ctor_parent_bad_namespace(self): parent_key = self._make_one('KIND', 1234, namespace='FOO', - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) with self.assertRaises(ValueError): self._make_one('KIND2', 1234, namespace='BAR', parent=parent_key, - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) def test_ctor_parent_bad_project(self): parent_key = 
self._make_one('KIND', 1234, project='FOO') with self.assertRaises(ValueError): self._make_one('KIND2', 1234, parent=parent_key, - project='BAR') + project='BAR') def test_ctor_parent_empty_path(self): parent_key = self._make_one('KIND', 1234, - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) with self.assertRaises(ValueError): self._make_one(parent=parent_key) @@ -95,7 +95,7 @@ def test_ctor_explicit(self): _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, - project=_PROJECT) + project=_PROJECT) self.assertEqual(key.project, _PROJECT) self.assertEqual(key.namespace, _NAMESPACE) self.assertEqual(key.kind, _KIND) @@ -110,8 +110,8 @@ def test_ctor_bad_id_or_name(self): project=self._DEFAULT_PROJECT) self.assertRaises(ValueError, self._make_one, 'KIND', None, project=self._DEFAULT_PROJECT) - self.assertRaises(ValueError, self._make_one, 'KIND', 10, 'KIND2', None, - project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, self._make_one, 'KIND', 10, 'KIND2', + None, project=self._DEFAULT_PROJECT) def test__clone(self): _PROJECT = 'PROJECT-ALT' @@ -120,7 +120,7 @@ def test__clone(self): _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, - project=_PROJECT) + project=_PROJECT) clone = key._clone() self.assertEqual(clone.project, _PROJECT) self.assertEqual(clone.namespace, _NAMESPACE) @@ -137,7 +137,7 @@ def test__clone_with_parent(self): _PATH = [{'kind': _KIND1, 'id': _ID1}, {'kind': _KIND2, 'id': _ID2}] parent = self._make_one(_KIND1, _ID1, namespace=_NAMESPACE, - project=_PROJECT) + project=_PROJECT) key = self._make_one(_KIND2, _ID2, parent=parent) self.assertIs(key.parent, parent) clone = key._clone() @@ -216,9 +216,9 @@ def test___eq_____ne___same_kind_and_id_different_namespace(self): _KIND = 'KIND' _ID = 1234 key1 = self._make_one(_KIND, _ID, project=_PROJECT, - namespace=_NAMESPACE1) + namespace=_NAMESPACE1) key2 = self._make_one(_KIND, _ID, project=_PROJECT, - namespace=_NAMESPACE2) + namespace=_NAMESPACE2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -258,9 +258,9 @@ def test___eq_____ne___same_kind_and_name_different_namespace(self): _KIND = 'KIND' _NAME = 'one' key1 = self._make_one(_KIND, _NAME, project=_PROJECT, - namespace=_NAMESPACE1) + namespace=_NAMESPACE1) key2 = self._make_one(_KIND, _NAME, project=_PROJECT, - namespace=_NAMESPACE2) + namespace=_NAMESPACE2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) @@ -343,7 +343,7 @@ def test_to_protobuf_w_explicit_project(self): def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' key = self._make_one('KIND', namespace=_NAMESPACE, - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE) @@ -353,7 +353,7 @@ def test_to_protobuf_w_explicit_path(self): _ID = 1234 _NAME = 'NAME' key = self._make_one(_PARENT, _NAME, _CHILD, _ID, - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) pb = key.to_protobuf() elems = list(pb.path) self.assertEqual(len(elems), 2) @@ -391,7 +391,7 @@ def test_id_or_name_no_name_or_id(self): def test_id_or_name_no_name_or_id_child(self): key = self._make_one('KIND1', 1234, 'KIND2', - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) self.assertIsNone(key.id_or_name) def test_id_or_name_w_id_only(self): @@ -417,7 +417,7 @@ def test_parent_explicit_nested(self): _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': 
_PARENT_ID}] key = self._make_one(_PARENT_KIND, _PARENT_ID, 'KIND2', - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): @@ -425,7 +425,7 @@ def test_parent_multiple_calls(self): _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] key = self._make_one(_PARENT_KIND, _PARENT_ID, 'KIND2', - project=self._DEFAULT_PROJECT) + project=self._DEFAULT_PROJECT) parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index d0b1b5e02dad..728dbc3dc63d 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -432,7 +432,7 @@ def test__process_query_results(self): from google.cloud.datastore._generated import query_pb2 iterator = self._make_one(None, None, - end_cursor='abcd') + end_cursor='abcd') self.assertIsNotNone(iterator._end_cursor) entity_pbs = object() @@ -453,7 +453,7 @@ def test__process_query_results_done(self): from google.cloud.datastore._generated import query_pb2 iterator = self._make_one(None, None, - end_cursor='abcd') + end_cursor='abcd') self.assertIsNotNone(iterator._end_cursor) entity_pbs = object() From 1dc7ff14e491399f3c55a649fda17a775fbe5299 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 21:17:51 -0800 Subject: [PATCH 046/611] Adding quiet flag to pip command for local deps. --- packages/google-cloud-datastore/tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini index e40bcca54c94..13c0b72d3c98 100644 --- a/packages/google-cloud-datastore/tox.ini +++ b/packages/google-cloud-datastore/tox.ini @@ -4,7 +4,7 @@ envlist = [testing] localdeps = - pip install --upgrade {toxinidir}/../core + pip install --quiet --upgrade {toxinidir}/../core deps = pytest covercmd = From 232d9bc0a08303cf423c4949c2f23a76550a2815 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 22:04:09 -0800 Subject: [PATCH 047/611] Dropping usage of _Monkey in favor of mock.patch. This was done only in bigquery, datastore and storage packages. Still needs updates in bigtable, core, logging, monitoring, pubsub and speech. 
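For reference, the two idioms side by side, as a self-contained sketch (the standard library's math module stands in for the patched targets here; _Monkey took a module object plus keyword overrides, while mock.patch takes a dotted path):

    import math

    import mock

    def fake_sqrt(value):
        # Replacement used for the duration of the patch.
        return 42

    # Old idiom, roughly what google.cloud._testing._Monkey provided:
    #     with _Monkey(math, sqrt=fake_sqrt):
    #         assert math.sqrt(4) == 42
    # New idiom, used throughout this diff:
    with mock.patch('math.sqrt', new=fake_sqrt):
        assert math.sqrt(4) == 42
    assert math.sqrt(4) == 2.0  # the original is restored on exit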
--- packages/google-cloud-datastore/tox.ini | 1 + .../unit_tests/test__http.py | 58 +++++---- .../unit_tests/test_client.py | 120 ++++++++++-------- 3 files changed, 102 insertions(+), 77 deletions(-) diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini index 13c0b72d3c98..22efa3321840 100644 --- a/packages/google-cloud-datastore/tox.ini +++ b/packages/google-cloud-datastore/tox.ini @@ -7,6 +7,7 @@ localdeps = pip install --quiet --upgrade {toxinidir}/../core deps = pytest + mock covercmd = py.test --quiet \ --cov=google.cloud.datastore \ diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index e96eed96a032..611bda7e2f3e 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -181,8 +181,7 @@ def _get_target_class(): return _DatastoreAPIOverGRPC def _make_one(self, stub, connection=None, secure=True, mock_args=None): - from google.cloud._testing import _Monkey - from google.cloud.datastore import _http as MUT + import mock if connection is None: connection = _Connection(None) @@ -197,10 +196,15 @@ def mock_make_stub(*args): return stub if secure: - to_monkey = {'make_secure_stub': mock_make_stub} + patch = mock.patch( + 'google.cloud.datastore._http.make_secure_stub', + new=mock_make_stub) else: - to_monkey = {'make_insecure_stub': mock_make_stub} - with _Monkey(MUT, **to_monkey): + patch = mock.patch( + 'google.cloud.datastore._http.make_insecure_stub', + new=mock_make_stub) + + with patch: return self._get_target_class()(connection, secure) def test_constructor(self): @@ -372,9 +376,10 @@ def _make_query_pb(self, kind): return pb def _make_one(self, credentials=None, http=None, use_grpc=False): - from google.cloud._testing import _Monkey - from google.cloud.datastore import _http as MUT - with _Monkey(MUT, _USE_GRPC=use_grpc): + import mock + + with mock.patch('google.cloud.datastore._http._USE_GRPC', + new=use_grpc): return self._get_target_class()(credentials=credentials, http=http) def _verifyProtobufCall(self, called_with, URI, conn): @@ -391,15 +396,14 @@ def test_default_url(self): self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): - import os - from google.cloud._testing import _Monkey + import mock from google.cloud.connection import API_BASE_URL from google.cloud.environment_vars import GCD_HOST HOST = 'CURR_HOST' fake_environ = {GCD_HOST: HOST} - with _Monkey(os, environ=fake_environ): + with mock.patch('os.environ', new=fake_environ): conn = self._make_one() self.assertNotEqual(conn.api_base_url, API_BASE_URL) @@ -410,8 +414,7 @@ def test_ctor_defaults(self): self.assertIsNone(conn.credentials) def test_ctor_without_grpc(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import _http as MUT + import mock connections = [] return_val = object() @@ -420,7 +423,10 @@ def mock_api(connection): connections.append(connection) return return_val - with _Monkey(MUT, _DatastoreAPIOverHttp=mock_api): + patch = mock.patch( + 'google.cloud.datastore._http._DatastoreAPIOverHttp', + new=mock_api) + with patch: conn = self._make_one(use_grpc=False) self.assertIsNone(conn.credentials) @@ -428,8 +434,7 @@ def mock_api(connection): self.assertEqual(connections, [conn]) def test_ctor_with_grpc(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import _http as MUT + import mock api_args = [] return_val 
= object() @@ -438,7 +443,10 @@ def mock_api(connection, secure): api_args.append((connection, secure)) return return_val - with _Monkey(MUT, _DatastoreAPIOverGRPC=mock_api): + patch = mock.patch( + 'google.cloud.datastore._http._DatastoreAPIOverGRPC', + new=mock_api) + with patch: conn = self._make_one(use_grpc=True) self.assertIsNone(conn.credentials) @@ -922,9 +930,8 @@ def test_begin_transaction(self): request.ParseFromString(cw['body']) def test_commit_wo_transaction(self): - from google.cloud._testing import _Monkey + import mock from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore import _http as MUT from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -953,7 +960,10 @@ def mock_parse(response): _parsed.append(response) return expected_result - with _Monkey(MUT, _parse_commit_response=mock_parse): + patch = mock.patch( + 'google.cloud.datastore._http._parse_commit_response', + new=mock_parse) + with patch: result = conn.commit(PROJECT, req_pb, None) self.assertIs(result, expected_result) @@ -968,9 +978,8 @@ def mock_parse(response): self.assertEqual(_parsed, [rsp_pb]) def test_commit_w_transaction(self): - from google.cloud._testing import _Monkey + import mock from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore import _http as MUT from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -999,7 +1008,10 @@ def mock_parse(response): _parsed.append(response) return expected_result - with _Monkey(MUT, _parse_commit_response=mock_parse): + patch = mock.patch( + 'google.cloud.datastore._http._parse_commit_response', + new=mock_parse) + with patch: result = conn.commit(PROJECT, req_pb, b'xact') self.assertIs(result, expected_result) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 2484d93796bc..61bb56ff96a0 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -38,22 +38,20 @@ def _call_fut(self): return _get_gcd_project() def test_no_value(self): - import os - from google.cloud._testing import _Monkey + import mock environ = {} - with _Monkey(os, getenv=environ.get): + with mock.patch('os.getenv', new=environ.get): project = self._call_fut() self.assertIsNone(project) def test_value_set(self): - import os - from google.cloud._testing import _Monkey + import mock from google.cloud.datastore.client import GCD_DATASET MOCK_PROJECT = object() environ = {GCD_DATASET: MOCK_PROJECT} - with _Monkey(os, getenv=environ.get): + with mock.patch('os.getenv', new=environ.get): project = self._call_fut() self.assertEqual(project, MOCK_PROJECT) @@ -67,8 +65,7 @@ def _call_fut(self, project=None): def _determine_default_helper(self, gcd=None, fallback=None, project_called=None): - from google.cloud._testing import _Monkey - from google.cloud.datastore import client + import mock _callers = [] @@ -80,12 +77,11 @@ def fallback_mock(project=None): _callers.append(('fallback_mock', project)) return fallback - patched_methods = { - '_get_gcd_project': gcd_mock, - '_base_default_project': fallback_mock, - } - - with _Monkey(client, **patched_methods): + patch = mock.patch.multiple( + 'google.cloud.datastore.client', + _get_gcd_project=gcd_mock, + _base_default_project=fallback_mock) + with patch: returned_project = self._call_fut(project_called) return returned_project, _callers @@ -141,18 +137,18 @@ def 
_make_one(self, project=PROJECT, namespace=None, http=http) def test_ctor_w_project_no_environ(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import client as _MUT + import mock # Some environments (e.g. AppVeyor CI) run in GCE, so # this test would fail artificially. - with _Monkey(_MUT, _base_default_project=lambda project: None): + patch = mock.patch( + 'google.cloud.datastore.client._base_default_project', + new=lambda project: None) + with patch: self.assertRaises(EnvironmentError, self._make_one, None) def test_ctor_w_implicit_inputs(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import client as _MUT - from google.cloud import client as _base_client + import mock OTHER = 'other' creds = object() @@ -163,10 +159,15 @@ def fallback_mock(project): return project or OTHER klass = self._get_target_class() - with _Monkey(_MUT, - _determine_default_project=fallback_mock): - with _Monkey(_base_client, - get_credentials=lambda: creds): + patch1 = mock.patch( + 'google.cloud.datastore.client._determine_default_project', + new=fallback_mock) + patch2 = mock.patch( + 'google.cloud.client.get_credentials', + new=lambda: creds) + + with patch1: + with patch2: client = klass() self.assertEqual(client.project, OTHER) self.assertIsNone(client.namespace) @@ -495,8 +496,7 @@ def test_get_multi_hit_multiple_keys_different_project(self): client.get_multi([key1, key2]) def test_get_multi_max_loops(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import client as _MUT + import mock from google.cloud.datastore.key import Key KIND = 'Kind' @@ -513,7 +513,10 @@ def test_get_multi_max_loops(self): key = Key(KIND, ID, project=self.PROJECT) deferred = [] missing = [] - with _Monkey(_MUT, _MAX_LOOPS=-1): + + patch = mock.patch( + 'google.cloud.datastore.client._MAX_LOOPS', new=-1) + with patch: result = client.get_multi([key], missing=missing, deferred=deferred) @@ -702,8 +705,7 @@ def test_key_w_project(self): client.key, KIND, ID, project=self.PROJECT) def test_key_wo_project(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock KIND = 'KIND' ID = 1234 @@ -711,7 +713,9 @@ def test_key_wo_project(self): creds = object() client = self._make_one(credentials=creds) - with _Monkey(MUT, Key=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Key', new=_Dummy) + with patch: key = client.key(KIND, ID) self.assertIsInstance(key, _Dummy) @@ -723,8 +727,7 @@ def test_key_wo_project(self): self.assertEqual(key.kwargs, expected_kwargs) def test_key_w_namespace(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock KIND = 'KIND' ID = 1234 @@ -733,7 +736,9 @@ def test_key_w_namespace(self): creds = object() client = self._make_one(namespace=NAMESPACE, credentials=creds) - with _Monkey(MUT, Key=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Key', new=_Dummy) + with patch: key = client.key(KIND, ID) self.assertIsInstance(key, _Dummy) @@ -744,8 +749,7 @@ def test_key_w_namespace(self): self.assertEqual(key.kwargs, expected_kwargs) def test_key_w_namespace_collision(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock KIND = 'KIND' ID = 1234 @@ -755,7 +759,9 @@ def test_key_w_namespace_collision(self): creds = object() client = self._make_one(namespace=NAMESPACE1, credentials=creds) - with _Monkey(MUT, Key=_Dummy): + 
patch = mock.patch( + 'google.cloud.datastore.client.Key', new=_Dummy) + with patch: key = client.key(KIND, ID, namespace=NAMESPACE2) self.assertIsInstance(key, _Dummy) @@ -766,13 +772,14 @@ def test_key_w_namespace_collision(self): self.assertEqual(key.kwargs, expected_kwargs) def test_batch(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock creds = object() client = self._make_one(credentials=creds) - with _Monkey(MUT, Batch=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Batch', new=_Dummy) + with patch: batch = client.batch() self.assertIsInstance(batch, _Dummy) @@ -780,13 +787,14 @@ def test_batch(self): self.assertEqual(batch.kwargs, {}) def test_transaction_defaults(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock creds = object() client = self._make_one(credentials=creds) - with _Monkey(MUT, Transaction=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Transaction', new=_Dummy) + with patch: xact = client.transaction() self.assertIsInstance(xact, _Dummy) @@ -812,13 +820,14 @@ def test_query_w_project(self): client.query, kind=KIND, project=self.PROJECT) def test_query_w_defaults(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock creds = object() client = self._make_one(credentials=creds) - with _Monkey(MUT, Query=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Query', new=_Dummy) + with patch: query = client.query() self.assertIsInstance(query, _Dummy) @@ -830,8 +839,7 @@ def test_query_w_defaults(self): self.assertEqual(query.kwargs, expected_kwargs) def test_query_explicit(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock KIND = 'KIND' NAMESPACE = 'NAMESPACE' @@ -844,7 +852,9 @@ def test_query_explicit(self): creds = object() client = self._make_one(credentials=creds) - with _Monkey(MUT, Query=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Query', new=_Dummy) + with patch: query = client.query( kind=KIND, namespace=NAMESPACE, @@ -870,8 +880,7 @@ def test_query_explicit(self): self.assertEqual(query.kwargs, kwargs) def test_query_w_namespace(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock KIND = 'KIND' NAMESPACE = object() @@ -879,7 +888,9 @@ def test_query_w_namespace(self): creds = object() client = self._make_one(namespace=NAMESPACE, credentials=creds) - with _Monkey(MUT, Query=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Query', new=_Dummy) + with patch: query = client.query(kind=KIND) self.assertIsInstance(query, _Dummy) @@ -892,8 +903,7 @@ def test_query_w_namespace(self): self.assertEqual(query.kwargs, expected_kwargs) def test_query_w_namespace_collision(self): - from google.cloud.datastore import client as MUT - from google.cloud._testing import _Monkey + import mock KIND = 'KIND' NAMESPACE1 = object() @@ -902,7 +912,9 @@ def test_query_w_namespace_collision(self): creds = object() client = self._make_one(namespace=NAMESPACE1, credentials=creds) - with _Monkey(MUT, Query=_Dummy): + patch = mock.patch( + 'google.cloud.datastore.client.Query', new=_Dummy) + with patch: query = client.query(kind=KIND, namespace=NAMESPACE2) self.assertIsInstance(query, _Dummy) From 07d0e66a4239b6faeb9c487f198ae28782429f1f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 
18:25:50 -0800 Subject: [PATCH 048/611] Updating connection -> _connection attribute in some packages. In particular: bigquery, bigtable and datastore. (The only change in bigtable was an import, and that attribute should probably go elsewhere.) --- .../google/cloud/datastore/_http.py | 2 +- .../google/cloud/datastore/batch.py | 11 +--- .../google/cloud/datastore/client.py | 4 +- .../google/cloud/datastore/transaction.py | 5 +- .../unit_tests/test__http.py | 2 +- .../unit_tests/test_batch.py | 4 +- .../unit_tests/test_client.py | 52 +++++++++---------- .../unit_tests/test_query.py | 2 +- .../unit_tests/test_transaction.py | 4 +- 9 files changed, 39 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 4636b2651b7a..b66626fabccb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -21,7 +21,7 @@ from google.cloud._helpers import make_insecure_stub from google.cloud._helpers import make_secure_stub -from google.cloud import connection as connection_module +from google.cloud import _http as connection_module from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_HOST from google.cloud import exceptions diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index e944b56b7a6e..2c09f357ee2e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -103,15 +103,6 @@ def namespace(self): """ return self._client.namespace - @property - def connection(self): - """Getter for connection over which the batch will run. - - :rtype: :class:`google.cloud.datastore._http.Connection` - :returns: The connection over which the batch will run. - """ - return self._client.connection - def _add_partial_key_entity_pb(self): """Adds a new mutation for an entity with a partial key. @@ -247,7 +238,7 @@ def _commit(self): This is called by :meth:`commit`. """ # NOTE: ``self._commit_request`` will be modified. 
- _, updated_keys = self.connection.commit( + _, updated_keys = self._client._connection.commit( self.project, self._commit_request, self._id) # If the back-end returns without error, we are guaranteed that # :meth:`Connection.commit` will return keys that match (length and diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index fc9cbf2ea321..c9eccdd6fd09 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -294,7 +294,7 @@ def get_multi(self, keys, missing=None, deferred=None, transaction=None): transaction = self.current_transaction entity_pbs = _extended_lookup( - connection=self.connection, + connection=self._connection, project=self.project, key_pbs=[k.to_protobuf() for k in keys], missing=missing, @@ -414,7 +414,7 @@ def allocate_ids(self, incomplete_key, num_ids): incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids - conn = self.connection + conn = self._connection allocated_key_pbs = conn.allocate_ids(incomplete_key.project, incomplete_key_pbs) allocated_ids = [allocated_key_pb.path[-1].id diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index ef61d64a61d6..d3fe0b45e240 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -145,7 +145,8 @@ def begin(self): """ super(Transaction, self).begin() try: - self._id = self.connection.begin_transaction(self.project) + self._id = self._client._connection.begin_transaction( + self.project) except: self._status = self._ABORTED raise @@ -159,7 +160,7 @@ def rollback(self): - Sets the current transaction's ID to None. """ try: - self.connection.rollback(self.project, self._id) + self._client._connection.rollback(self.project, self._id) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. 
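Reduced to a sketch, the attribute change above looks like the following (all three classes are invented stand-ins, not the real Batch/Client/Connection): the batch keeps only its client and reaches the now-private connection through it, rather than exposing a public connection property of its own.

    class _FakeConnection(object):
        def commit(self, project, request, transaction_id):
            # Pretend RPC returning (index_updates, updated_keys).
            return 0, []

    class _FakeClient(object):
        def __init__(self, project, connection):
            self.project = project
            self._connection = connection  # private, as of this commit

    class _FakeBatch(object):
        def __init__(self, client):
            self._client = client

        def _commit(self):
            # Same delegation as batch.py above; no self.connection property.
            return self._client._connection.commit(
                self._client.project, None, None)

    batch = _FakeBatch(_FakeClient('my-project', _FakeConnection()))
    assert batch._commit() == (0, [])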
diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 611bda7e2f3e..2b0e826143ff 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -397,7 +397,7 @@ def test_default_url(self): def test_custom_url_from_env(self): import mock - from google.cloud.connection import API_BASE_URL + from google.cloud._http import API_BASE_URL from google.cloud.environment_vars import GCD_HOST HOST = 'CURR_HOST' diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index 737668af02b4..0bdc8762e64c 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -35,7 +35,7 @@ def test_ctor(self): batch = self._make_one(client) self.assertEqual(batch.project, _PROJECT) - self.assertEqual(batch.connection, connection) + self.assertIs(batch._client, client) self.assertEqual(batch.namespace, _NAMESPACE) self.assertIsNone(batch._id) self.assertEqual(batch._status, batch._INITIAL) @@ -439,7 +439,7 @@ class _Client(object): def __init__(self, project, connection, namespace=None): self.project = project - self.connection = connection + self._connection = connection self.namespace = namespace self._batches = [] diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 61bb56ff96a0..a817e4066a25 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -171,9 +171,9 @@ def fallback_mock(project): client = klass() self.assertEqual(client.project, OTHER) self.assertIsNone(client.namespace) - self.assertIsInstance(client.connection, _MockConnection) - self.assertIs(client.connection.credentials, creds) - self.assertIsNone(client.connection.http) + self.assertIsInstance(client._connection, _MockConnection) + self.assertIs(client._connection.credentials, creds) + self.assertIsNone(client._connection.http) self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) self.assertEqual(default_called, [None]) @@ -189,9 +189,9 @@ def test_ctor_w_explicit_inputs(self): http=http) self.assertEqual(client.project, OTHER) self.assertEqual(client.namespace, NAMESPACE) - self.assertIsInstance(client.connection, _MockConnection) - self.assertIs(client.connection.credentials, creds) - self.assertIs(client.connection.http, http) + self.assertIsInstance(client._connection, _MockConnection) + self.assertIs(client._connection.credentials, creds) + self.assertIs(client._connection.http, http) self.assertIsNone(client.current_batch) self.assertEqual(list(client._batch_stack), []) @@ -269,7 +269,7 @@ def test_get_multi_miss(self): creds = object() client = self._make_one(credentials=creds) - client.connection._add_lookup_result() + client._connection._add_lookup_result() key = Key('Kind', 1234, project=self.PROJECT) results = client.get_multi([key]) self.assertEqual(results, []) @@ -291,7 +291,7 @@ def test_get_multi_miss_w_missing(self): creds = object() client = self._make_one(credentials=creds) # Set missing entity on mock connection. 
- client.connection._add_lookup_result(missing=[missed]) + client._connection._add_lookup_result(missing=[missed]) key = Key(KIND, ID, project=self.PROJECT) missing = [] @@ -330,7 +330,7 @@ def test_get_multi_miss_w_deferred(self): # Set deferred entity on mock connection. creds = object() client = self._make_one(credentials=creds) - client.connection._add_lookup_result(deferred=[key.to_protobuf()]) + client._connection._add_lookup_result(deferred=[key.to_protobuf()]) deferred = [] entities = client.get_multi([key], deferred=deferred) @@ -356,8 +356,8 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): creds = object() client = self._make_one(credentials=creds) # mock up two separate requests - client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) - client.connection._add_lookup_result([entity2_pb]) + client._connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) + client._connection._add_lookup_result([entity2_pb]) missing = [] found = client.get_multi([key1, key2], missing=missing) @@ -373,7 +373,7 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): self.assertEqual(found[1].key.path, key2.path) self.assertEqual(found[1].key.project, key2.project) - cw = client.connection._lookup_cw + cw = client._connection._lookup_cw self.assertEqual(len(cw), 2) ds_id, k_pbs, eventual, tid = cw[0] @@ -404,7 +404,7 @@ def test_get_multi_hit(self): # Make a connection to return the entity pb. creds = object() client = self._make_one(credentials=creds) - client.connection._add_lookup_result([entity_pb]) + client._connection._add_lookup_result([entity_pb]) key = Key(KIND, ID, project=self.PROJECT) result, = client.get_multi([key]) @@ -431,7 +431,7 @@ def test_get_multi_hit_w_transaction(self): # Make a connection to return the entity pb. creds = object() client = self._make_one(credentials=creds) - client.connection._add_lookup_result([entity_pb]) + client._connection._add_lookup_result([entity_pb]) key = Key(KIND, ID, project=self.PROJECT) txn = client.transaction() @@ -446,7 +446,7 @@ def test_get_multi_hit_w_transaction(self): self.assertEqual(list(result), ['foo']) self.assertEqual(result['foo'], 'Foo') - cw = client.connection._lookup_cw + cw = client._connection._lookup_cw self.assertEqual(len(cw), 1) _, _, _, transaction_id = cw[0] self.assertEqual(transaction_id, TXN_ID) @@ -465,7 +465,7 @@ def test_get_multi_hit_multiple_keys_same_project(self): # Make a connection to return the entity pbs. creds = object() client = self._make_one(credentials=creds) - client.connection._add_lookup_result([entity_pb1, entity_pb2]) + client._connection._add_lookup_result([entity_pb1, entity_pb2]) key1 = Key(KIND, ID1, project=self.PROJECT) key2 = Key(KIND, ID2, project=self.PROJECT) @@ -508,7 +508,7 @@ def test_get_multi_max_loops(self): # Make a connection to return the entity pb. 
creds = object() client = self._make_one(credentials=creds) - client.connection._add_lookup_result([entity_pb]) + client._connection._add_lookup_result([entity_pb]) key = Key(KIND, ID, project=self.PROJECT) deferred = [] @@ -564,14 +564,14 @@ def test_put_multi_no_batch_w_partial_key(self): creds = object() client = self._make_one(credentials=creds) - client.connection._commit.append([_KeyPB(key)]) + client._connection._commit.append([_KeyPB(key)]) result = client.put_multi([entity]) self.assertIsNone(result) - self.assertEqual(len(client.connection._commit_cw), 1) + self.assertEqual(len(client._connection._commit_cw), 1) (project, - commit_req, transaction_id) = client.connection._commit_cw[0] + commit_req, transaction_id) = client._connection._commit_cw[0] self.assertEqual(project, self.PROJECT) mutated_entity = _mutated_pb(self, commit_req.mutations, 'insert') @@ -627,20 +627,20 @@ def test_delete_multi_no_keys(self): client = self._make_one(credentials=creds) result = client.delete_multi([]) self.assertIsNone(result) - self.assertEqual(len(client.connection._commit_cw), 0) + self.assertEqual(len(client._connection._commit_cw), 0) def test_delete_multi_no_batch(self): key = _Key(self.PROJECT) creds = object() client = self._make_one(credentials=creds) - client.connection._commit.append([]) + client._connection._commit.append([]) result = client.delete_multi([key]) self.assertIsNone(result) - self.assertEqual(len(client.connection._commit_cw), 1) + self.assertEqual(len(client._connection._commit_cw), 1) (project, - commit_req, transaction_id) = client.connection._commit_cw[0] + commit_req, transaction_id) = client._connection._commit_cw[0] self.assertEqual(project, self.PROJECT) mutated_key = _mutated_pb(self, commit_req.mutations, 'delete') @@ -658,7 +658,7 @@ def test_delete_multi_w_existing_batch(self): self.assertIsNone(result) mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete') self.assertEqual(mutated_key, key._key) - self.assertEqual(len(client.connection._commit_cw), 0) + self.assertEqual(len(client._connection._commit_cw), 0) def test_delete_multi_w_existing_transaction(self): creds = object() @@ -671,7 +671,7 @@ def test_delete_multi_w_existing_transaction(self): self.assertIsNone(result) mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete') self.assertEqual(mutated_key, key._key) - self.assertEqual(len(client.connection._commit_cw), 0) + self.assertEqual(len(client._connection._commit_cw), 0) def test_allocate_ids_w_partial_key(self): NUM_IDS = 2 diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 728dbc3dc63d..0e431623e369 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -679,7 +679,7 @@ class _Client(object): def __init__(self, project, connection, namespace=None): self.project = project - self.connection = connection + self._connection = connection self.namespace = namespace @property diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index 46fbf21320e5..c09304df6f5b 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -33,7 +33,7 @@ def test_ctor_defaults(self): client = _Client(_PROJECT, connection) xact = self._make_one(client) self.assertEqual(xact.project, _PROJECT) - 
self.assertEqual(xact.connection, connection) + self.assertIs(xact._client, client) self.assertIsNone(xact.id) self.assertEqual(xact._status, self._get_target_class()._INITIAL) self.assertIsInstance(xact._commit_request, @@ -227,7 +227,7 @@ class _Client(object): def __init__(self, project, connection, namespace=None): self.project = project - self.connection = connection + self._connection = connection self.namespace = namespace self._batches = [] From c4b924ed68c058277c266e05d317dfc1f58a3893 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 20:29:20 -0800 Subject: [PATCH 049/611] Fixing usage of public connection attribute in datastore system test. Also fixing a merge issue with new datastore Iterator. --- packages/google-cloud-datastore/google/cloud/datastore/query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index a9488db725af..a6f6c845e17f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -492,7 +492,7 @@ def _next_page(self): pb = self._build_protobuf() transaction = self.client.current_transaction - query_results = self.client.connection.run_query( + query_results = self.client._connection.run_query( query_pb=pb, project=self._query.project, namespace=self._query.namespace, From 1cd2a94ea0fb7156e767bbc0f9a5e1c64154ac17 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 12:44:19 -0800 Subject: [PATCH 050/611] Upgrading core to version to 0.21.0. As a result, also upgrading the umbrella package and all packages to 0.21.0 (since they all depend on core). --- packages/google-cloud-datastore/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index d79db705950a..b1884e3c69eb 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -50,13 +50,13 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.20.0', + 'google-cloud-core >= 0.21.0', 'grpcio >= 1.0.0, < 2.0dev', ] setup( name='google-cloud-datastore', - version='0.20.1', + version='0.21.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From 5f693854b398414f35751a6568ef12bd402cd7f7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 14:11:34 -0800 Subject: [PATCH 051/611] Need to install from local deps first. The `pip install --upgrade` still is needed to ensure freshness but by removing the filesystem paths from deps we made the initial install grab from PyPI (by mistake). This way, all local package deps are grabbed from the local filesystem. 
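In rough terms, the resulting [testing] section of tox.ini looks like this (an abridged sketch reconstructed from the diff below; covercmd omitted):

    [testing]
    localdeps =
        pip install --quiet --upgrade {toxinidir}/../core
    deps =
        {toxinidir}/../core
        mock
        pytest

With the filesystem path listed in deps, tox installs the local `core` checkout when it creates the virtualenv, so nothing gets resolved from PyPI by accident, and the `pip install --upgrade` in `localdeps` only has to keep that local install fresh.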
--- packages/google-cloud-datastore/tox.ini | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini index 22efa3321840..0209be892cdf 100644 --- a/packages/google-cloud-datastore/tox.ini +++ b/packages/google-cloud-datastore/tox.ini @@ -6,8 +6,9 @@ envlist = localdeps = pip install --quiet --upgrade {toxinidir}/../core deps = - pytest + {toxinidir}/../core mock + pytest covercmd = py.test --quiet \ --cov=google.cloud.datastore \ @@ -17,7 +18,6 @@ covercmd = [testenv] commands = - {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -26,7 +26,6 @@ deps = basepython = python2.7 commands = - {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From ad6abff29bef88e824fe871b5c880e2a97afb3b1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 14:58:42 -0800 Subject: [PATCH 052/611] Fixing accidental removal of {localdeps} Also - adding RTD dependency for runtimeconfig. - adding local paths to umbrella tox config "deps" as was done in #2733. --- packages/google-cloud-datastore/tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini index 0209be892cdf..76ffb1f2a8b5 100644 --- a/packages/google-cloud-datastore/tox.ini +++ b/packages/google-cloud-datastore/tox.ini @@ -18,6 +18,7 @@ covercmd = [testenv] commands = + {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -26,6 +27,7 @@ deps = basepython = python2.7 commands = + {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From cc4f735694cfade2b220e7ca6b366739dbaf6532 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 20:36:49 -0800 Subject: [PATCH 053/611] Adding ability to run doctests with datastore system tests. --- .../google/cloud/datastore/__init__.py | 24 ++++++++++++++----- .../google/cloud/datastore/entity.py | 4 ++-- .../google/cloud/datastore/key.py | 2 +- .../unit_tests/test_entity.py | 13 +++++----- 4 files changed, 27 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 7ca8e3f116f0..51c742d2b17f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -16,14 +16,26 @@ You'll typically use these to get started with the API: -.. code-block:: python +.. testsetup:: constructors - from google.cloud import datastore + import os + os.environ['GOOGLE_CLOUD_PROJECT'] = u'my-project' - client = datastore.Client() - key = client.key('EntityKind', 1234) - entity = datastore.Entity(key) - query = client.query(kind='EntityKind') +.. 
doctest:: constructors + + >>> from google.cloud import datastore + >>> + >>> client = datastore.Client() + >>> print(client.project) my-project + >>> key = client.key('EntityKind', 1234) + >>> key + <Key('EntityKind', 1234), project=my-project> + >>> entity = datastore.Entity(key) + >>> entity['answer'] = 42 + >>> entity + <Entity('EntityKind', 1234) {'answer': 42}> + >>> query = client.query(kind='EntityKind') The main concepts with this API are: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 02ef0fd3ac73..2728768a3af8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -144,7 +144,7 @@ def exclude_from_indexes(self): def __repr__(self): if self.key: - return '<Entity%s %s>' % (self.key.path, + return '<Entity%s %s>' % (self.key._flat_path, super(Entity, self).__repr__()) else: - return '<Entity %s>' % (super(Entity, self).__repr__()) + return '<Entity %s>' % (super(Entity, self).__repr__(),) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 72a50340ec84..1ccabf171857 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -380,7 +380,7 @@ def parent(self): return self._parent def __repr__(self): - return '<Key%s, project=%s>' % (self.path, self.project) + return '<Key%s, project=%s>' % (self._flat_path, self.project) def _validate_project(project, parent): diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/unit_tests/test_entity.py index 30d40f947cc6..31c60a172001 100644 --- a/packages/google-cloud-datastore/unit_tests/test_entity.py +++ b/packages/google-cloud-datastore/unit_tests/test_entity.py @@ -190,10 +190,13 @@ def test___repr___no_key_empty(self): def test___repr___w_key_non_empty(self): key = _Key() - key._path = '/bar/baz' + flat_path = ('bar', 12, 'baz', 'himom') + key._flat_path = flat_path entity = self._make_one(key=key) - entity['foo'] = 'Foo' - self.assertEqual(repr(entity), "<Entity/bar/baz {'foo': 'Foo'}>") + entity_vals = {'foo': 'Foo'} + entity.update(entity_vals) + expected = '<Entity%s %r>' % (flat_path, entity_vals) + self.assertEqual(repr(entity), expected) class _Key(object): @@ -206,7 +209,3 @@ class _Key(object): def __init__(self, project=_PROJECT): self.project = project - - @property - def path(self): - return self._path From 4eeccac03c73d8d8e8b8855e5ade54e4bdbc8029 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 16 Nov 2016 11:09:27 -0500 Subject: [PATCH 054/611] Set core version compatible specifier to packages. --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index b1884e3c69eb..e79548d09a58 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.21.0', + 'google-cloud-core >= 0.21.0, < 0.22dev', 'grpcio >= 1.0.0, < 2.0dev', ] setup( From c53686e0e9058a855be8776c31c5985d74f33be4 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 2 Dec 2016 15:02:25 -0800 Subject: [PATCH 055/611] Switch from oauth2client to google-auth (#2726) * Removes all use of oauth2client from every package and tests. * Updates core to use google-auth's default credentials, project ID, and scoping logic. * Updates bigtable to use google-auth's scoping logic.
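The datastore tests pick this change up in a later patch in this series (see PATCH 062 below): plain `object()` stand-ins no longer pass as credentials, so a mock spec'd against the google-auth base class is used instead. A minimal sketch of that pattern:

    import mock
    import google.auth.credentials

    def _make_credentials():
        # Satisfies isinstance() checks against the google-auth base
        # class without running any real auth flow.
        return mock.Mock(spec=google.auth.credentials.Credentials)

At runtime, real credentials (and a project ID) come from google.auth.default(), inferred from the environment.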
--- .../unit_tests/test__http.py | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 2b0e826143ff..35781cdf3a40 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -455,9 +455,7 @@ def mock_api(connection, secure): def test_ctor_explicit(self): class Creds(object): - - def create_scoped_required(self): - return False + pass creds = Creds() conn = self._make_one(creds) @@ -475,23 +473,12 @@ def test_http_wo_creds(self): self.assertIsInstance(conn.http, httplib2.Http) def test_http_w_creds(self): - import httplib2 - - authorized = object() - class Creds(object): - - def authorize(self, http): - self._called_with = http - return authorized - - def create_scoped_required(self): - return False + pass creds = Creds() conn = self._make_one(creds) - self.assertIs(conn.http, authorized) - self.assertIsInstance(creds._called_with, httplib2.Http) + self.assertIs(conn.http.credentials, creds) def test_build_api_url_w_default_base_version(self): PROJECT = 'PROJECT' From 0f810a620ea12014d088a121d63a8115ce96c166 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 2 Dec 2016 15:05:39 -0800 Subject: [PATCH 056/611] Removing os.environ patching in doctest. It turns out Sphinx allows this outside of a sandbox. --- .../google/cloud/datastore/__init__.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 51c742d2b17f..1bdb49e483e1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -16,21 +16,14 @@ You'll typically use these to get started with the API: -.. testsetup:: constructors - - import os - os.environ['GOOGLE_CLOUD_PROJECT'] = u'my-project' - .. doctest:: constructors >>> from google.cloud import datastore >>> >>> client = datastore.Client() - >>> print(client.project) - my-project >>> key = client.key('EntityKind', 1234) >>> key - + >>> entity = datastore.Entity(key) >>> entity['answer'] = 42 >>> entity From 8d19a80755f30ccf8f99291088b2190665e66886 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 6 Dec 2016 23:39:39 -0800 Subject: [PATCH 057/611] Converting code-block to doctest in datastore.transaction. In the process, adding a Config.TO_DELETE runtime list so that test cleanup can handle any loose objects created. --- .../google/cloud/datastore/transaction.py | 115 ++++++++++++------ 1 file changed, 77 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index d3fe0b45e240..b63098959d0d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -27,31 +27,61 @@ class Transaction(Batch): operations (either ``insert`` or ``upsert``) into the same mutation, and execute those within a transaction: - .. code-block:: python + .. testsetup:: txn-put-multi, txn-api - >>> client = datastore.Client() - >>> with client.transaction(): - ... 
client.put_multi([entity1, entity2]) + from google.cloud import datastore + from datastore import Config # system tests + + client = datastore.Client() + key1 = client.key('_Doctest') + entity1 = datastore.Entity(key=key1) + entity1['foo'] = 1337 + + key2 = client.key('_Doctest', 'abcd1234') + entity2 = datastore.Entity(key=key2) + entity2['foo'] = 42 + + Config.TO_DELETE.extend([entity1, entity2]) + + .. doctest:: txn-put-multi + + >>> with client.transaction(): + ... client.put_multi([entity1, entity2]) Because it derives from :class:`~google.cloud.datastore.batch.Batch`, :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods: - .. code-block:: python + .. doctest:: txn-api - >>> with client.transaction() as xact: - ... xact.put(entity1) - ... xact.delete(entity2.key) + >>> with client.transaction() as xact: + ... xact.put(entity1) + ... xact.delete(entity2.key) By default, the transaction is rolled back if the transaction block exits with an error: - .. code-block:: python + .. testsetup:: txn-error + + from google.cloud import datastore + + client = datastore.Client() + + def do_some_work(): + return + + class SomeException(Exception): + pass - >>> with client.transaction(): - ... do_some_work() - ... raise SomeException() # rolls back + .. doctest:: txn-error - If the transaction block exists without an exception, it will commit + >>> with client.transaction(): + ... do_some_work() + ... raise SomeException # rolls back + Traceback (most recent call last): + ... + SomeException + + If the transaction block exits without an exception, it will commit by default. .. warning:: @@ -60,11 +90,23 @@ class Transaction(Batch): entities will not be available at save time! That means, if you try: - .. code-block:: python + .. testsetup:: txn-entity-key, txn-entity-key-after, txn-manual + + from google.cloud import datastore + from datastore import Config # system tests + + client = datastore.Client() - >>> with client.transaction(): - ... entity = datastore.Entity(key=client.key('Thing')) - ... client.put(entity) + def Entity(*args, **kwargs): + entity = datastore.Entity(*args, **kwargs) + Config.TO_DELETE.append(entity) + return entity + + .. doctest:: txn-entity-key + + >>> with client.transaction(): + ... entity = Entity(key=client.key('Thing')) + ... client.put(entity) ``entity`` won't have a complete key until the transaction is committed. @@ -72,32 +114,29 @@ class Transaction(Batch): Once you exit the transaction (or call :meth:`commit`), the automatically generated ID will be assigned to the entity: - .. code-block:: python + .. doctest:: txn-entity-key-after - >>> with client.transaction(): - ... entity = datastore.Entity(key=client.key('Thing')) - ... client.put(entity) - ... print(entity.key.is_partial) # There is no ID on this key. - ... - True - >>> print(entity.key.is_partial) # There *is* an ID. - False + >>> with client.transaction(): + ... entity = Entity(key=client.key('Thing')) + ... client.put(entity) + ... print(entity.key.is_partial) # There is no ID on this key. + ... + True + >>> print(entity.key.is_partial) # There *is* an ID. + False If you don't want to use the context manager you can initialize a transaction manually: - .. code-block:: python - - >>> transaction = client.transaction() - >>> transaction.begin() - >>> - >>> entity = datastore.Entity(key=client.key('Thing')) - >>> transaction.put(entity) - >>> - >>> if error: - ... transaction.rollback() - ... else: - ... transaction.commit() + .. 
doctest:: txn-manual + + >>> transaction = client.transaction() + >>> transaction.begin() + >>> + >>> entity = Entity(key=client.key('Thing')) + >>> transaction.put(entity) + >>> + >>> transaction.commit() :type client: :class:`google.cloud.datastore.client.Client` :param client: the client used to connect to datastore. From 72d89e60d0c752d1e68a8ffc4d6e872e4c3c3d92 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 6 Dec 2016 23:56:28 -0800 Subject: [PATCH 058/611] Converting code-block to doctest in datastore.client. --- .../google/cloud/datastore/client.py | 77 ++++++++++++------- 1 file changed, 48 insertions(+), 29 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index c9eccdd6fd09..809c39a902ed 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -146,6 +146,11 @@ def _extended_lookup(connection, project, key_pbs, class Client(_BaseClient, _ClientProjectMixin): """Convenience wrapper for invoking APIs/factories w/ a project. + .. doctest:: + + >>> from google.cloud import datastore + >>> client = datastore.Client() + :type project: str :param project: (optional) The project to pass to proxied API methods. @@ -449,51 +454,65 @@ def query(self, **kwargs): Using query to search a datastore: - .. code-block:: python + .. testsetup:: query + + from google.cloud import datastore + + client = datastore.Client() + query = client.query(kind='_Doctest') - >>> from google.cloud import datastore - >>> client = datastore.Client() - >>> query = client.query(kind='MyKind') - >>> query.add_filter('property', '=', 'val') + def do_something(entity): + pass + + .. doctest:: query + + >>> query = client.query(kind='MyKind') + >>> query.add_filter('property', '=', 'val') Using the query iterator - .. code-block:: python + .. doctest:: query - >>> query_iter = query.fetch() - >>> for entity in query_iter: - ... do_something(entity) + >>> query_iter = query.fetch() + >>> for entity in query_iter: + ... do_something(entity) or manually page through results - .. code-block:: python + .. testsetup:: query-page + + from google.cloud import datastore + from datastore import Config # system tests + + client = datastore.Client() - >>> query_iter = query.fetch(start_cursor='2mdd223i944') - >>> pages = query_iter.pages - >>> - >>> first_page = next(pages) - >>> first_page_entities = list(first_page) - >>> query_iter.next_page_token - 'abc-some-cursor' - >>> - >>> second_page = next(pages) - >>> second_page_entities = list(second_page) - >>> query_iter.next_page_token is None - True + key = client.key('_Doctest') + entity1 = datastore.Entity(key=key) + entity1['foo'] = 1337 + entity2 = datastore.Entity(key=key) + entity2['foo'] = 42 + Config.TO_DELETE.extend([entity1, entity2]) + client.put_multi([entity1, entity2]) - Under the hood this is doing: + query = client.query(kind='_Doctest') + cursor = None - .. code-block:: python + .. doctest:: query-page - >>> connection.run_query('project', query.to_protobuf()) - [], cursor, more_results, skipped_results + >>> query_iter = query.fetch(start_cursor=cursor) + >>> pages = query_iter.pages + >>> + >>> first_page = next(pages) + >>> first_page_entities = list(first_page) + >>> query_iter.next_page_token + '...' :type kwargs: dict :param kwargs: Parameters for initializing and instance of - :class:`google.cloud.datastore.query.Query`. 
+ :class:`~google.cloud.datastore.query.Query`. - :rtype: :class:`google.cloud.datastore.query.Query` - :returns: An instance of :class:`google.cloud.datastore.query.Query` + :rtype: :class:`~google.cloud.datastore.query.Query` + :returns: A query object. """ if 'client' in kwargs: raise TypeError('Cannot pass client') From 16c36156a4a6e9699416f1eb2aec389c57c3b5c8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 7 Dec 2016 00:06:42 -0800 Subject: [PATCH 059/611] Converting code-block to doctest in datastore.key. --- .../google/cloud/datastore/key.py | 48 +++++++++++++------ 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 1ccabf171857..c33e590a2581 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -23,30 +23,50 @@ class Key(object): """An immutable representation of a datastore Key. - To create a basic key: + .. testsetup:: key-ctor - .. code-block:: python + from google.cloud import datastore - >>> Key('EntityKind', 1234) - - >>> Key('EntityKind', 'foo') - + project = 'my-special-pony' + client = datastore.Client(project=project) + Key = datastore.Key + + parent_key = client.key('Parent', 'foo') + + To create a basic key directly: + + .. doctest:: key-ctor + + >>> Key('EntityKind', 1234, project=project) + <Key('EntityKind', 1234), project=my-special-pony> + >>> Key('EntityKind', 'foo', project=project) + <Key('EntityKind', 'foo'), project=my-special-pony> + + Though typical usage comes via the + :meth:`~google.cloud.datastore.client.Client.key` factory: + + .. doctest:: key-ctor + + >>> client.key('EntityKind', 1234) + <Key('EntityKind', 1234), project=my-special-pony> + >>> client.key('EntityKind', 'foo') + <Key('EntityKind', 'foo'), project=my-special-pony> To create a key with a parent: - .. code-block:: python + .. doctest:: key-ctor - >>> Key('Parent', 'foo', 'Child', 1234) - - >>> Key('Child', 1234, parent=parent_key) - + >>> client.key('Parent', 'foo', 'Child', 1234) + <Key('Parent', 'foo', 'Child', 1234), project=my-special-pony> + >>> client.key('Child', 1234, parent=parent_key) + <Key('Parent', 'foo', 'Child', 1234), project=my-special-pony> To create a partial key: - .. code-block:: python + .. doctest:: key-ctor - >>> Key('Parent', 'foo', 'Child') - + >>> client.key('Parent', 'foo', 'Child') + <Key('Parent', 'foo', 'Child'), project=my-special-pony> :type path_args: tuple of string and integer :param path_args: May represent a partial (odd length) or full (even From 183f357a0a230d0cd77f29fb9989c1333d1b8b99 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 7 Dec 2016 00:18:56 -0800 Subject: [PATCH 060/611] Converting code-block to doctest in datastore.entity. --- .../google/cloud/datastore/entity.py | 48 +++++++++++++------ 1 file changed, 33 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 2728768a3af8..51fb0c659a7a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -40,30 +40,48 @@ class Entity(dict): Use :meth:`~google.cloud.datastore.client.Client.get` to retrieve an existing entity: - .. code-block:: python + .. testsetup:: entity-ctor - >>> from google.cloud import datastore - >>> client = datastore.Client() - >>> client.get(key) - + from google.cloud import datastore + from datastore import Config # system tests + + client = datastore.Client() + key = client.key('EntityKind', 1234, namespace='_Doctest') + entity = datastore.Entity(key=key) + entity['property'] = 'value' + Config.TO_DELETE.append(entity) + + client.put(entity) + + ..
doctest:: entity-ctor + + >>> client.get(key) + You can the set values on the entity just like you would on any other dictionary. - .. code-block:: python + .. doctest:: entity-ctor + + >>> entity['age'] = 20 + >>> entity['name'] = 'JJ' + + And you can treat an entity like a regular Python dictionary: + + .. testsetup:: entity-dict - >>> entity['age'] = 20 - >>> entity['name'] = 'JJ' - >>> entity - + from google.cloud import datastore - And you can convert an entity to a regular Python dictionary with the - ``dict`` builtin: + entity = datastore.Entity() + entity['age'] = 20 + entity['name'] = 'JJ' - .. code-block:: python + .. doctest:: entity-dict - >>> dict(entity) - {'age': 20, 'name': 'JJ'} + >>> sorted(entity.keys()) + ['age', 'name'] + >>> sorted(entity.items()) + [('age', 20), ('name', 'JJ')] .. note:: From 6d1a48af031380061dba0c218667e1360ca7d2aa Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 7 Dec 2016 00:19:27 -0800 Subject: [PATCH 061/611] Changing doctest indent from 2->3 spaces to be uniform. --- .../google/cloud/datastore/__init__.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 1bdb49e483e1..89e0eaaa31ea 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -18,17 +18,17 @@ .. doctest:: constructors - >>> from google.cloud import datastore - >>> - >>> client = datastore.Client() - >>> key = client.key('EntityKind', 1234) - >>> key - - >>> entity = datastore.Entity(key) - >>> entity['answer'] = 42 - >>> entity - - >>> query = client.query(kind='EntityKind') + >>> from google.cloud import datastore + >>> + >>> client = datastore.Client() + >>> key = client.key('EntityKind', 1234) + >>> key + + >>> entity = datastore.Entity(key) + >>> entity['answer'] = 42 + >>> entity + + >>> query = client.query(kind='EntityKind') The main concepts with this API are: From ed21ce0bc6c458c845cf57aef8722406e652e5ef Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 7 Dec 2016 16:00:24 -0800 Subject: [PATCH 062/611] Raise ValueError if credentials are not from google-auth (#2828) --- .../unit_tests/test_client.py | 119 ++++++++---------- 1 file changed, 49 insertions(+), 70 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index a817e4066a25..f6e016d03712 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): from google.cloud.datastore._generated import entity_pb2 @@ -38,15 +45,12 @@ def _call_fut(self): return _get_gcd_project() def test_no_value(self): - import mock - environ = {} with mock.patch('os.getenv', new=environ.get): project = self._call_fut() self.assertIsNone(project) def test_value_set(self): - import mock from google.cloud.datastore.client import GCD_DATASET MOCK_PROJECT = object() @@ -65,8 +69,6 @@ def _call_fut(self, project=None): def _determine_default_helper(self, gcd=None, fallback=None, project_called=None): - import mock - _callers = [] def gcd_mock(): @@ 
-137,8 +139,6 @@ def _make_one(self, project=PROJECT, namespace=None, http=http) def test_ctor_w_project_no_environ(self): - import mock - # Some environments (e.g. AppVeyor CI) run in GCE, so # this test would fail artificially. patch = mock.patch( @@ -148,10 +148,8 @@ def test_ctor_w_project_no_environ(self): self.assertRaises(EnvironmentError, self._make_one, None) def test_ctor_w_implicit_inputs(self): - import mock - OTHER = 'other' - creds = object() + creds = _make_credentials() default_called = [] def fallback_mock(project): @@ -181,7 +179,7 @@ def fallback_mock(project): def test_ctor_w_explicit_inputs(self): OTHER = 'other' NAMESPACE = 'namespace' - creds = object() + creds = _make_credentials() http = object() client = self._make_one(project=OTHER, namespace=NAMESPACE, @@ -196,7 +194,7 @@ def test_ctor_w_explicit_inputs(self): self.assertEqual(list(client._batch_stack), []) def test__push_batch_and__pop_batch(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) batch = client.batch() xact = client.transaction() @@ -221,7 +219,7 @@ def _get_multi(*args, **kw): _called_with.append((args, kw)) return [] - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client.get_multi = _get_multi @@ -244,7 +242,7 @@ def _get_multi(*args, **kw): _called_with.append((args, kw)) return [_entity] - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client.get_multi = _get_multi @@ -259,7 +257,7 @@ def _get_multi(*args, **kw): self.assertEqual(_called_with[0][1]['transaction'], TXN_ID) def test_get_multi_no_keys(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) results = client.get_multi([]) self.assertEqual(results, []) @@ -267,7 +265,7 @@ def test_get_multi_no_keys(self): def test_get_multi_miss(self): from google.cloud.datastore.key import Key - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._add_lookup_result() key = Key('Kind', 1234, project=self.PROJECT) @@ -288,7 +286,7 @@ def test_get_multi_miss_w_missing(self): path_element.kind = KIND path_element.id = ID - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) # Set missing entity on mock connection. client._connection._add_lookup_result(missing=[missed]) @@ -303,7 +301,7 @@ def test_get_multi_miss_w_missing(self): def test_get_multi_w_missing_non_empty(self): from google.cloud.datastore.key import Key - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) key = Key('Kind', 1234, project=self.PROJECT) @@ -314,7 +312,7 @@ def test_get_multi_w_missing_non_empty(self): def test_get_multi_w_deferred_non_empty(self): from google.cloud.datastore.key import Key - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) key = Key('Kind', 1234, project=self.PROJECT) @@ -328,7 +326,7 @@ def test_get_multi_miss_w_deferred(self): key = Key('Kind', 1234, project=self.PROJECT) # Set deferred entity on mock connection. 
- creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._add_lookup_result(deferred=[key.to_protobuf()]) @@ -353,7 +351,7 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): entity2_pb = entity_pb2.Entity() entity2_pb.key.CopyFrom(key2_pb) - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) # mock up two separate requests client._connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) @@ -402,7 +400,7 @@ def test_get_multi_hit(self): entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') # Make a connection to return the entity pb. - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._add_lookup_result([entity_pb]) @@ -429,7 +427,7 @@ def test_get_multi_hit_w_transaction(self): entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') # Make a connection to return the entity pb. - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._add_lookup_result([entity_pb]) @@ -463,7 +461,7 @@ def test_get_multi_hit_multiple_keys_same_project(self): entity_pb2 = _make_entity_pb(self.PROJECT, KIND, ID2) # Make a connection to return the entity pbs. - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._add_lookup_result([entity_pb1, entity_pb2]) @@ -489,14 +487,13 @@ def test_get_multi_hit_multiple_keys_different_project(self): key1 = Key('KIND', 1234, project=PROJECT1) key2 = Key('KIND', 1234, project=PROJECT2) - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) with self.assertRaises(ValueError): client.get_multi([key1, key2]) def test_get_multi_max_loops(self): - import mock from google.cloud.datastore.key import Key KIND = 'Kind' @@ -506,7 +503,7 @@ def test_get_multi_max_loops(self): entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') # Make a connection to return the entity pb. 
- creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._add_lookup_result([entity_pb]) @@ -532,7 +529,7 @@ def test_put(self): def _put_multi(*args, **kw): _called_with.append((args, kw)) - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client.put_multi = _put_multi entity = object() @@ -543,7 +540,7 @@ def _put_multi(*args, **kw): self.assertEqual(_called_with[0][1]['entities'], [entity]) def test_put_multi_no_entities(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) self.assertIsNone(client.put_multi([])) @@ -551,7 +548,7 @@ def test_put_multi_w_single_empty_entity(self): # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649 from google.cloud.datastore.entity import Entity - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) self.assertRaises(ValueError, client.put_multi, Entity()) @@ -562,7 +559,7 @@ def test_put_multi_no_batch_w_partial_key(self): key = entity.key = _Key(self.PROJECT) key._id = None - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._commit.append([_KeyPB(key)]) @@ -588,7 +585,7 @@ def test_put_multi_no_batch_w_partial_key(self): def test_put_multi_existing_batch_w_completed_key(self): from google.cloud.datastore.helpers import _property_tuples - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) entity = _Entity(foo=u'bar') key = entity.key = _Key(self.PROJECT) @@ -612,7 +609,7 @@ def test_delete(self): def _delete_multi(*args, **kw): _called_with.append((args, kw)) - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client.delete_multi = _delete_multi key = object() @@ -623,7 +620,7 @@ def _delete_multi(*args, **kw): self.assertEqual(_called_with[0][1]['keys'], [key]) def test_delete_multi_no_keys(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) result = client.delete_multi([]) self.assertIsNone(result) @@ -632,7 +629,7 @@ def test_delete_multi_no_keys(self): def test_delete_multi_no_batch(self): key = _Key(self.PROJECT) - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) client._connection._commit.append([]) @@ -648,7 +645,7 @@ def test_delete_multi_no_batch(self): self.assertIsNone(transaction_id) def test_delete_multi_w_existing_batch(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) key = _Key(self.PROJECT) @@ -661,7 +658,7 @@ def test_delete_multi_w_existing_batch(self): self.assertEqual(len(client._connection._commit_cw), 0) def test_delete_multi_w_existing_transaction(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) key = _Key(self.PROJECT) @@ -679,7 +676,7 @@ def test_allocate_ids_w_partial_key(self): INCOMPLETE_KEY = _Key(self.PROJECT) INCOMPLETE_KEY._id = None - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) result = client.allocate_ids(INCOMPLETE_KEY, NUM_IDS) @@ -688,7 +685,7 @@ def test_allocate_ids_w_partial_key(self): self.assertEqual([key._id for key in result], list(range(NUM_IDS))) def test_allocate_ids_with_completed_key(self): - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) COMPLETE_KEY = _Key(self.PROJECT) @@ -698,19 +695,17 @@ def 
test_key_w_project(self): KIND = 'KIND' ID = 1234 - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) self.assertRaises(TypeError, client.key, KIND, ID, project=self.PROJECT) def test_key_wo_project(self): - import mock - KIND = 'KIND' ID = 1234 - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( @@ -727,13 +722,11 @@ def test_key_wo_project(self): self.assertEqual(key.kwargs, expected_kwargs) def test_key_w_namespace(self): - import mock - KIND = 'KIND' ID = 1234 NAMESPACE = object() - creds = object() + creds = _make_credentials() client = self._make_one(namespace=NAMESPACE, credentials=creds) patch = mock.patch( @@ -749,14 +742,12 @@ def test_key_w_namespace(self): self.assertEqual(key.kwargs, expected_kwargs) def test_key_w_namespace_collision(self): - import mock - KIND = 'KIND' ID = 1234 NAMESPACE1 = object() NAMESPACE2 = object() - creds = object() + creds = _make_credentials() client = self._make_one(namespace=NAMESPACE1, credentials=creds) patch = mock.patch( @@ -772,9 +763,7 @@ def test_key_w_namespace_collision(self): self.assertEqual(key.kwargs, expected_kwargs) def test_batch(self): - import mock - - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( @@ -787,9 +776,7 @@ def test_batch(self): self.assertEqual(batch.kwargs, {}) def test_transaction_defaults(self): - import mock - - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( @@ -804,25 +791,23 @@ def test_transaction_defaults(self): def test_query_w_client(self): KIND = 'KIND' - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) - other = self._make_one(credentials=object()) + other = self._make_one(credentials=_make_credentials()) self.assertRaises(TypeError, client.query, kind=KIND, client=other) def test_query_w_project(self): KIND = 'KIND' - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) self.assertRaises(TypeError, client.query, kind=KIND, project=self.PROJECT) def test_query_w_defaults(self): - import mock - - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( @@ -839,8 +824,6 @@ def test_query_w_defaults(self): self.assertEqual(query.kwargs, expected_kwargs) def test_query_explicit(self): - import mock - KIND = 'KIND' NAMESPACE = 'NAMESPACE' ANCESTOR = object() @@ -849,7 +832,7 @@ def test_query_explicit(self): ORDER = ['PROPERTY'] DISTINCT_ON = ['DISTINCT_ON'] - creds = object() + creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( @@ -880,12 +863,10 @@ def test_query_explicit(self): self.assertEqual(query.kwargs, kwargs) def test_query_w_namespace(self): - import mock - KIND = 'KIND' NAMESPACE = object() - creds = object() + creds = _make_credentials() client = self._make_one(namespace=NAMESPACE, credentials=creds) patch = mock.patch( @@ -903,13 +884,11 @@ def test_query_w_namespace(self): self.assertEqual(query.kwargs, expected_kwargs) def test_query_w_namespace_collision(self): - import mock - KIND = 'KIND' NAMESPACE1 = object() NAMESPACE2 = object() - creds = object() + creds = _make_credentials() client = self._make_one(namespace=NAMESPACE1, credentials=creds) patch = mock.patch( From a2b2962e592eabfb9a4e035cf2d53eebd1bce4e5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 8 Dec 2016 15:17:03 -0800 Subject: 
[PATCH 063/611] Update versions for mega-release. We want to update - `google-cloud-bigquery` - `google-cloud-datastore` - `google-cloud-logging` - `google-cloud-storage` - `google-cloud-core` And then update `google-cloud` to re-wrap the latest versions of each. However, to avoid having packages in `google-cloud` with conflicting versions of `google-cloud-core`, we must release all packages. --- packages/google-cloud-datastore/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index e79548d09a58..a0ca9a2f5606 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -50,13 +50,13 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.21.0, < 0.22dev', + 'google-cloud-core >= 0.22.1, < 0.23dev', 'grpcio >= 1.0.0, < 2.0dev', ] setup( name='google-cloud-datastore', - version='0.21.0', + version='0.22.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From fed6533be9487ee471cfca55804ff2bf2510a999 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 9 Dec 2016 16:57:17 -0800 Subject: [PATCH 064/611] Explicitly putting 1.0.2 lower bound on grpcio. Also upgrading logging from 0.14.x to 0.90.x --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index a0ca9a2f5606..3de3a0b91a84 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.22.1, < 0.23dev', - 'grpcio >= 1.0.0, < 2.0dev', + 'grpcio >= 1.0.2, < 2.0dev', ] setup( From 1123d80083236813e5b1b93ac4c4a46bb3fddf7a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 14 Dec 2016 23:43:05 -0800 Subject: [PATCH 065/611] Manually creating Client._connection in subclasses. --- .../google/cloud/datastore/client.py | 25 +++++++++++-------- .../unit_tests/test_client.py | 12 +++++---- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 809c39a902ed..8473a08c6f65 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -157,26 +157,29 @@ class Client(_BaseClient, _ClientProjectMixin): :type namespace: str :param namespace: (optional) namespace to pass to proxied API methods. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. 
Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. """ - _connection_class = Connection def __init__(self, project=None, namespace=None, credentials=None, http=None): _ClientProjectMixin.__init__(self, project=project) + _BaseClient.__init__(self, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) + self.namespace = namespace self._batch_stack = _LocalStack() - super(Client, self).__init__(credentials, http) @staticmethod def _determine_default(project): diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index f6e016d03712..ef198b98b19a 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -118,13 +118,15 @@ class TestClient(unittest.TestCase): PROJECT = 'PROJECT' def setUp(self): - KLASS = self._get_target_class() - self.original_cnxn_class = KLASS._connection_class - KLASS._connection_class = _MockConnection + from google.cloud.datastore import client as MUT + + self.original_cnxn_class = MUT.Connection + MUT.Connection = _MockConnection def tearDown(self): - KLASS = self._get_target_class() - KLASS._connection_class = self.original_cnxn_class + from google.cloud.datastore import client as MUT + + MUT.Connection = self.original_cnxn_class @staticmethod def _get_target_class(): From 878f4784dc37beea7e005727a446b815c5496721 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 15 Dec 2016 13:32:01 -0800 Subject: [PATCH 066/611] Using `gapic-google-cloud-datastore-v1` instead of `datastore._generated`. This doesn't fully switch over to using the GAPIC client, but at least gets rid of **all** references to the generated code.
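Concretely, imports of the generated modules move from the vendored package to the published protos, along these lines (an illustrative sketch; module paths per the proto-google-cloud-datastore-v1 distribution that the GAPIC package depends on):

    # Before: generated code vendored in-tree.
    # from google.cloud.datastore._generated import entity_pb2

    # After: generated code installed as a dependency.
    from google.cloud.proto.datastore.v1 import entity_pb2

    key_pb = entity_pb2.Key()
    key_pb.partition_id.project_id = 'my-project'
    key_pb.path.add(kind='EntityKind', id=1234)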
--- packages/google-cloud-datastore/.coveragerc | 2 - .../cloud/datastore/_generated/__init__.py | 15 - .../datastore/_generated/_datastore.proto | 316 ------ .../cloud/datastore/_generated/_entity.proto | 201 ---- .../cloud/datastore/_generated/_query.proto | 306 ------ .../_generated/datastore_grpc_pb2.py | 301 ------ .../datastore/_generated/datastore_pb2.py | 891 ----------------- .../cloud/datastore/_generated/entity_pb2.py | 495 ---------- .../cloud/datastore/_generated/query_pb2.py | 934 ------------------ .../google/cloud/datastore/_http.py | 83 +- .../google/cloud/datastore/batch.py | 12 +- .../google/cloud/datastore/client.py | 4 +- .../google/cloud/datastore/helpers.py | 22 +- .../google/cloud/datastore/key.py | 4 +- .../google/cloud/datastore/query.py | 10 +- packages/google-cloud-datastore/setup.py | 1 + .../unit_tests/test__http.py | 58 +- .../unit_tests/test_batch.py | 4 +- .../unit_tests/test_client.py | 8 +- .../unit_tests/test_helpers.py | 60 +- .../unit_tests/test_key.py | 2 +- .../unit_tests/test_query.py | 20 +- .../unit_tests/test_transaction.py | 4 +- 23 files changed, 146 insertions(+), 3607 deletions(-) delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index 08f3fdea2433..a54b99aa14b7 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -2,8 +2,6 @@ branch = True [report] -omit = - */_generated/*.py fail_under = 100 show_missing = True exclude_lines = diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py deleted file mode 100644 index 5b2724764514..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Generated protobuf modules for Google Cloud Datastore API.""" diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto deleted file mode 100644 index 5881e9a14714..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_datastore.proto +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/datastore/v1/entity.proto"; -import "google/datastore/v1/query.proto"; - -option java_multiple_files = true; -option java_outer_classname = "DatastoreProto"; -option java_package = "com.google.datastore.v1"; - - -// Each RPC normalizes the partition IDs of the keys in its input entities, -// and always returns entities with keys with normalized partition IDs. -// This applies to all keys and entities, including those in values, except keys -// with both an empty path and an empty or unset partition ID. Normalization of -// input keys sets the project ID (if not already set) to the project ID from -// the request. -// -service Datastore { - // Looks up entities by key. - rpc Lookup(LookupRequest) returns (LookupResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:lookup" body: "*" }; - } - - // Queries for entities. - rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:runQuery" body: "*" }; - } - - // Begins a new transaction. - rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:beginTransaction" body: "*" }; - } - - // Commits a transaction, optionally creating, deleting or modifying some - // entities. - rpc Commit(CommitRequest) returns (CommitResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:commit" body: "*" }; - } - - // Rolls back a transaction. - rpc Rollback(RollbackRequest) returns (RollbackResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:rollback" body: "*" }; - } - - // Allocates IDs for the given keys, which is useful for referencing an entity - // before it is inserted. - rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:allocateIds" body: "*" }; - } -} - -// The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. -message LookupRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // The options for this lookup request. - ReadOptions read_options = 1; - - // Keys of entities to look up. - repeated Key keys = 3; -} - -// The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. -message LookupResponse { - // Entities found as `ResultType.FULL` entities. 
The order of results in this - // field is undefined and has no relation to the order of the keys in the - // input. - repeated EntityResult found = 1; - - // Entities not found as `ResultType.KEY_ONLY` entities. The order of results - // in this field is undefined and has no relation to the order of the keys - // in the input. - repeated EntityResult missing = 2; - - // A list of keys that were not looked up due to resource constraints. The - // order of results in this field is undefined and has no relation to the - // order of the keys in the input. - repeated Key deferred = 3; -} - -// The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. -message RunQueryRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // Entities are partitioned into subsets, identified by a partition ID. - // Queries are scoped to a single partition. - // This partition ID is normalized with the standard default context - // partition ID. - PartitionId partition_id = 2; - - // The options for this query. - ReadOptions read_options = 1; - - // The type of query. - oneof query_type { - // The query to run. - Query query = 3; - - // The GQL query to run. - GqlQuery gql_query = 7; - } -} - -// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. -message RunQueryResponse { - // A batch of query results (always present). - QueryResultBatch batch = 1; - - // The parsed form of the `GqlQuery` from the request, if it was set. - Query query = 2; -} - -// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. -message BeginTransactionRequest { - // The ID of the project against which to make the request. - string project_id = 8; -} - -// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. -message BeginTransactionResponse { - // The transaction identifier (always present). - bytes transaction = 1; -} - -// The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -message RollbackRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // The transaction identifier, returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; -} - -// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -// (an empty message). -message RollbackResponse { - -} - -// The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. -message CommitRequest { - // The modes available for commits. - enum Mode { - // Unspecified. This value must not be used. - MODE_UNSPECIFIED = 0; - - // Transactional: The mutations are either all applied, or none are applied. - // Learn about transactions [here](https://cloud.google.com/datastore/docs/concepts/transactions). - TRANSACTIONAL = 1; - - // Non-transactional: The mutations may not apply as all or none. - NON_TRANSACTIONAL = 2; - } - - // The ID of the project against which to make the request. - string project_id = 8; - - // The type of commit to perform. Defaults to `TRANSACTIONAL`. - Mode mode = 5; - - // Must be set when mode is `TRANSACTIONAL`. - oneof transaction_selector { - // The identifier of the transaction associated with the commit. A - // transaction identifier is returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; - } - - // The mutations to perform. 
- // - // When mode is `TRANSACTIONAL`, mutations affecting a single entity are - // applied in order. The following sequences of mutations affecting a single - // entity are not permitted in a single `Commit` request: - // - // - `insert` followed by `insert` - // - `update` followed by `insert` - // - `upsert` followed by `insert` - // - `delete` followed by `update` - // - // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single - // entity. - repeated Mutation mutations = 6; -} - -// The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. -message CommitResponse { - // The result of performing the mutations. - // The i-th mutation result corresponds to the i-th mutation in the request. - repeated MutationResult mutation_results = 3; - - // The number of index entries updated during the commit, or zero if none were - // updated. - int32 index_updates = 4; -} - -// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. -message AllocateIdsRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // A list of keys with incomplete key paths for which to allocate IDs. - // No key may be reserved/read-only. - repeated Key keys = 1; -} - -// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. -message AllocateIdsResponse { - // The keys specified in the request (in the same order), each with - // its key path completed with a newly allocated ID. - repeated Key keys = 1; -} - -// A mutation to apply to an entity. -message Mutation { - // The mutation operation. - // - // For `insert`, `update`, and `upsert`: - // - The entity's key must not be reserved/read-only. - // - No property in the entity may have a reserved name, - // not even a property in an entity in a value. - // - No value in the entity may have meaning 18, - // not even a value in an entity in another value. - oneof operation { - // The entity to insert. The entity must not already exist. - // The entity key's final path element may be incomplete. - Entity insert = 4; - - // The entity to update. The entity must already exist. - // Must have a complete key path. - Entity update = 5; - - // The entity to upsert. The entity may or may not already exist. - // The entity key's final path element may be incomplete. - Entity upsert = 6; - - // The key of the entity to delete. The entity may or may not already exist. - // Must have a complete key path and must not be reserved/read-only. - Key delete = 7; - } - - // When set, the server will detect whether or not this mutation conflicts - // with the current version of the entity on the server. Conflicting mutations - // are not applied, and are marked as such in MutationResult. - oneof conflict_detection_strategy { - // The version of the entity that this mutation is being applied to. If this - // does not match the current version on the server, the mutation conflicts. - int64 base_version = 8; - } -} - -// The result of applying a mutation. -message MutationResult { - // The automatically allocated key. - // Set only when the mutation allocated a key. - Key key = 3; - - // The version of the entity on the server after processing the mutation. If - // the mutation doesn't change anything on the server, then the version will - // be the version of the current entity or, if no entity is present, a version - // that is strictly greater than the version of any previous entity and less - // than the version of any possible future entity. 
- int64 version = 4; - - // Whether a conflict was detected for this mutation. Always false when a - // conflict detection strategy field is not set in the mutation. - bool conflict_detected = 5; -} - -// The options shared by read requests. -message ReadOptions { - // The possible values for read consistencies. - enum ReadConsistency { - // Unspecified. This value must not be used. - READ_CONSISTENCY_UNSPECIFIED = 0; - - // Strong consistency. - STRONG = 1; - - // Eventual consistency. - EVENTUAL = 2; - } - - // If not specified, lookups and ancestor queries default to - // `read_consistency`=`STRONG`, global queries default to - // `read_consistency`=`EVENTUAL`. - oneof consistency_type { - // The non-transactional read consistency to use. - // Cannot be set to `STRONG` for global queries. - ReadConsistency read_consistency = 1; - - // The identifier of the transaction in which to read. A - // transaction identifier is returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 2; - } -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto deleted file mode 100644 index a0e7d39138f2..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_entity.proto +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/type/latlng.proto"; - -option java_multiple_files = true; -option java_outer_classname = "EntityProto"; -option java_package = "com.google.datastore.v1"; - - -// A partition ID identifies a grouping of entities. The grouping is always -// by project and namespace, however the namespace ID may be empty. -// -// A partition ID contains several dimensions: -// project ID and namespace ID. -// -// Partition dimensions: -// -// - May be `""`. -// - Must be valid UTF-8 bytes. -// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` -// If the value of any dimension matches regex `__.*__`, the partition is -// reserved/read-only. -// A reserved/read-only partition ID is forbidden in certain documented -// contexts. -// -// Foreign partition IDs (in which the project ID does -// not match the context project ID ) are discouraged. -// Reads and writes of foreign partition IDs may fail if the project is not in an active state. -message PartitionId { - // The ID of the project to which the entities belong. - string project_id = 2; - - // If not empty, the ID of the namespace to which the entities belong. - string namespace_id = 4; -} - -// A unique identifier for an entity. -// If a key's partition ID or any of its path kinds or names are -// reserved/read-only, the key is reserved/read-only. 
-// A reserved/read-only key is forbidden in certain documented contexts. -message Key { - // A (kind, ID/name) pair used to construct a key path. - // - // If either name or ID is set, the element is complete. - // If neither is set, the element is incomplete. - message PathElement { - // The kind of the entity. - // A kind matching regex `__.*__` is reserved/read-only. - // A kind must not contain more than 1500 bytes when UTF-8 encoded. - // Cannot be `""`. - string kind = 1; - - // The type of ID. - oneof id_type { - // The auto-allocated ID of the entity. - // Never equal to zero. Values less than zero are discouraged and may not - // be supported in the future. - int64 id = 2; - - // The name of the entity. - // A name matching regex `__.*__` is reserved/read-only. - // A name must not be more than 1500 bytes when UTF-8 encoded. - // Cannot be `""`. - string name = 3; - } - } - - // Entities are partitioned into subsets, currently identified by a project - // ID and namespace ID. - // Queries are scoped to a single partition. - PartitionId partition_id = 1; - - // The entity path. - // An entity path consists of one or more elements composed of a kind and a - // string or numerical identifier, which identify entities. The first - // element identifies a _root entity_, the second element identifies - // a _child_ of the root entity, the third element identifies a child of the - // second entity, and so forth. The entities identified by all prefixes of - // the path are called the element's _ancestors_. - // - // An entity path is always fully complete: *all* of the entity's ancestors - // are required to be in the path along with the entity identifier itself. - // The only exception is that in some documented cases, the identifier in the - // last path element (for the entity) itself may be omitted. For example, - // the last path element of the key of `Mutation.insert` may have no - // identifier. - // - // A path can never be empty, and a path can have at most 100 elements. - repeated PathElement path = 2; -} - -// An array value. -message ArrayValue { - // Values in the array. - // The order of this array may not be preserved if it contains a mix of - // indexed and unindexed values. - repeated Value values = 1; -} - -// A message that can hold any of the supported value types and associated -// metadata. -message Value { - // Must have a value set. - oneof value_type { - // A null value. - google.protobuf.NullValue null_value = 11; - - // A boolean value. - bool boolean_value = 1; - - // An integer value. - int64 integer_value = 2; - - // A double value. - double double_value = 3; - - // A timestamp value. - // When stored in the Datastore, precise only to microseconds; - // any additional precision is rounded down. - google.protobuf.Timestamp timestamp_value = 10; - - // A key value. - Key key_value = 5; - - // A UTF-8 encoded string value. - // When `exclude_from_indexes` is false (it is indexed) , may have at most 1500 bytes. - // Otherwise, may be set to at least 1,000,000 bytes. - string string_value = 17; - - // A blob value. - // May have at most 1,000,000 bytes. - // When `exclude_from_indexes` is false, may have at most 1500 bytes. - // In JSON requests, must be base64-encoded. - bytes blob_value = 18; - - // A geo point value representing a point on the surface of Earth. - google.type.LatLng geo_point_value = 8; - - // An entity value. - // - // - May have no key. - // - May have a key with an incomplete key path. - // - May have a reserved/read-only key. 
-    Entity entity_value = 6;
-
-    // An array value.
-    // Cannot contain another array value.
-    // A `Value` instance that sets field `array_value` must not set fields
-    // `meaning` or `exclude_from_indexes`.
-    ArrayValue array_value = 9;
-  }
-
-  // The `meaning` field should only be populated for backwards compatibility.
-  int32 meaning = 14;
-
-  // If the value should be excluded from all indexes including those defined
-  // explicitly.
-  bool exclude_from_indexes = 19;
-}
-
-// A Datastore data object.
-//
-// An entity is limited to 1 megabyte when stored. That _roughly_
-// corresponds to a limit of 1 megabyte for the serialized form of this
-// message.
-message Entity {
-  // The entity's key.
-  //
-  // An entity must have a key, unless otherwise documented (for example,
-  // an entity in `Value.entity_value` may have no key).
-  // An entity's kind is its key path's last element's kind,
-  // or null if it has no key.
-  Key key = 1;
-
-  // The entity's properties.
-  // The map's keys are property names.
-  // A property name matching regex `__.*__` is reserved.
-  // A reserved property name is forbidden in certain documented contexts.
-  // The name must not contain more than 500 characters.
-  // The name cannot be `""`.
-  map<string, Value> properties = 3;
-}
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto b/packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto
deleted file mode 100644
index e6dba2b226f9..000000000000
--- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/_query.proto
+++ /dev/null
@@ -1,306 +0,0 @@
-// Copyright 2016 Google Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package google.datastore.v1;
-
-import "google/api/annotations.proto";
-import "google/datastore/v1/entity.proto";
-import "google/protobuf/wrappers.proto";
-import "google/type/latlng.proto";
-
-option java_multiple_files = true;
-option java_outer_classname = "QueryProto";
-option java_package = "com.google.datastore.v1";
-
-
-// The result of fetching an entity from Datastore.
-message EntityResult {
-  // Specifies what data the 'entity' field contains.
-  // A `ResultType` is either implied (for example, in `LookupResponse.missing`
-  // from `datastore.proto`, it is always `KEY_ONLY`) or specified by context
-  // (for example, in message `QueryResultBatch`, field `entity_result_type`
-  // specifies a `ResultType` for all the values in field `entity_results`).
-  enum ResultType {
-    // Unspecified. This value is never used.
-    RESULT_TYPE_UNSPECIFIED = 0;
-
-    // The key and properties.
-    FULL = 1;
-
-    // A projected subset of properties. The entity may have no key.
-    PROJECTION = 2;
-
-    // Only the key.
-    KEY_ONLY = 3;
-  }
-
-  // The resulting entity.
-  Entity entity = 1;
-
-  // The version of the entity, a strictly positive number that monotonically
-  // increases with changes to the entity.
- // - // This field is set for [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity - // results. - // - // For [missing][google.datastore.v1.LookupResponse.missing] entities in `LookupResponse`, this - // is the version of the snapshot that was used to look up the entity, and it - // is always set except for eventually consistent reads. - int64 version = 4; - - // A cursor that points to the position after the result entity. - // Set only when the `EntityResult` is part of a `QueryResultBatch` message. - bytes cursor = 3; -} - -// A query for entities. -message Query { - // The projection to return. Defaults to returning all properties. - repeated Projection projection = 2; - - // The kinds to query (if empty, returns entities of all kinds). - // Currently at most 1 kind may be specified. - repeated KindExpression kind = 3; - - // The filter to apply. - Filter filter = 4; - - // The order to apply to the query results (if empty, order is unspecified). - repeated PropertyOrder order = 5; - - // The properties to make distinct. The query results will contain the first - // result for each distinct combination of values for the given properties - // (if empty, all results are returned). - repeated PropertyReference distinct_on = 6; - - // A starting point for the query results. Query cursors are - // returned in query result batches and - // [can only be used to continue the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). - bytes start_cursor = 7; - - // An ending point for the query results. Query cursors are - // returned in query result batches and - // [can only be used to limit the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). - bytes end_cursor = 8; - - // The number of results to skip. Applies before limit, but after all other - // constraints. Optional. Must be >= 0 if specified. - int32 offset = 10; - - // The maximum number of results to return. Applies after all other - // constraints. Optional. - // Unspecified is interpreted as no limit. - // Must be >= 0 if specified. - google.protobuf.Int32Value limit = 12; -} - -// A representation of a kind. -message KindExpression { - // The name of the kind. - string name = 1; -} - -// A reference to a property relative to the kind expressions. -message PropertyReference { - // The name of the property. - // If name includes "."s, it may be interpreted as a property name path. - string name = 2; -} - -// A representation of a property in a projection. -message Projection { - // The property to project. - PropertyReference property = 1; -} - -// The desired order for a specific property. -message PropertyOrder { - // The sort direction. - enum Direction { - // Unspecified. This value must not be used. - DIRECTION_UNSPECIFIED = 0; - - // Ascending. - ASCENDING = 1; - - // Descending. - DESCENDING = 2; - } - - // The property to order by. - PropertyReference property = 1; - - // The direction to order by. Defaults to `ASCENDING`. - Direction direction = 2; -} - -// A holder for any type of filter. -message Filter { - // The type of filter. - oneof filter_type { - // A composite filter. - CompositeFilter composite_filter = 1; - - // A filter on a property. - PropertyFilter property_filter = 2; - } -} - -// A filter that merges multiple other filters using the given operator. -message CompositeFilter { - // A composite filter operator. - enum Operator { - // Unspecified. This value must not be used. 
-    OPERATOR_UNSPECIFIED = 0;
-
-    // The results are required to satisfy each of the combined filters.
-    AND = 1;
-  }
-
-  // The operator for combining multiple filters.
-  Operator op = 1;
-
-  // The list of filters to combine.
-  // Must contain at least one filter.
-  repeated Filter filters = 2;
-}
-
-// A filter on a specific property.
-message PropertyFilter {
-  // A property filter operator.
-  enum Operator {
-    // Unspecified. This value must not be used.
-    OPERATOR_UNSPECIFIED = 0;
-
-    // Less than.
-    LESS_THAN = 1;
-
-    // Less than or equal.
-    LESS_THAN_OR_EQUAL = 2;
-
-    // Greater than.
-    GREATER_THAN = 3;
-
-    // Greater than or equal.
-    GREATER_THAN_OR_EQUAL = 4;
-
-    // Equal.
-    EQUAL = 5;
-
-    // Has ancestor.
-    HAS_ANCESTOR = 11;
-  }
-
-  // The property to filter by.
-  PropertyReference property = 1;
-
-  // The operator to filter by.
-  Operator op = 2;
-
-  // The value to compare the property to.
-  Value value = 3;
-}
-
-// A [GQL query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference).
-message GqlQuery {
-  // A string of the format described
-  // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference).
-  string query_string = 1;
-
-  // When false, the query string must not contain any literals and instead must
-  // bind all values. For example,
-  // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while
-  // `SELECT * FROM Kind WHERE a = @value` is.
-  bool allow_literals = 2;
-
-  // For each non-reserved named binding site in the query string, there must be
-  // a named parameter with that name, but not necessarily the inverse.
-  //
-  // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex
-  // `__.*__`, and must not be `""`.
-  map<string, GqlQueryParameter> named_bindings = 5;
-
-  // Numbered binding site @1 references the first numbered parameter,
-  // effectively using 1-based indexing, rather than the usual 0.
-  //
-  // For each binding site numbered i in `query_string`, there must be an i-th
-  // numbered parameter. The inverse must also be true.
-  repeated GqlQueryParameter positional_bindings = 4;
-}
-
-// A binding parameter for a GQL query.
-message GqlQueryParameter {
-  // The type of parameter.
-  oneof parameter_type {
-    // A value parameter.
-    Value value = 2;
-
-    // A query cursor. Query cursors are returned in query
-    // result batches.
-    bytes cursor = 3;
-  }
-}
-
-// A batch of results produced by a query.
-message QueryResultBatch {
-  // The possible values for the `more_results` field.
-  enum MoreResultsType {
-    // Unspecified. This value is never used.
-    MORE_RESULTS_TYPE_UNSPECIFIED = 0;
-
-    // There may be additional batches to fetch from this query.
-    NOT_FINISHED = 1;
-
-    // The query is finished, but there may be more results after the limit.
-    MORE_RESULTS_AFTER_LIMIT = 2;
-
-    // The query is finished, but there may be more results after the end
-    // cursor.
-    MORE_RESULTS_AFTER_CURSOR = 4;
-
-    // The query has been exhausted.
-    NO_MORE_RESULTS = 3;
-  }
-
-  // The number of results skipped, typically because of an offset.
-  int32 skipped_results = 6;
-
-  // A cursor that points to the position after the last skipped result.
-  // Will be set when `skipped_results` != 0.
-  bytes skipped_cursor = 3;
-
-  // The result type for every entity in `entity_results`.
-  EntityResult.ResultType entity_result_type = 1;
-
-  // The results for this batch.
-  repeated EntityResult entity_results = 2;
-
-  // A cursor that points to the position after the last result in the batch.
- bytes end_cursor = 4; - - // The state of the query after the current batch. - MoreResultsType more_results = 5; - - // The version number of the snapshot this batch was returned from. - // This applies to the range of results from the query's `start_cursor` (or - // the beginning of the query if no cursor was given) to this batch's - // `end_cursor` (not the query's `end_cursor`). - // - // In a single transaction, subsequent query result batches for the same query - // can have a greater snapshot version number. Each batch's snapshot version - // is valid for all preceding batches. - int64 snapshot_version = 7; -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py deleted file mode 100644 index beea35710c6c..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py +++ /dev/null @@ -1,301 +0,0 @@ -# BEGIN: Imports from datastore_pb2 -from google.cloud.datastore._generated.datastore_pb2 import AllocateIdsRequest -from google.cloud.datastore._generated.datastore_pb2 import AllocateIdsResponse -from google.cloud.datastore._generated.datastore_pb2 import BeginTransactionRequest -from google.cloud.datastore._generated.datastore_pb2 import BeginTransactionResponse -from google.cloud.datastore._generated.datastore_pb2 import CommitRequest -from google.cloud.datastore._generated.datastore_pb2 import CommitResponse -from google.cloud.datastore._generated.datastore_pb2 import LookupRequest -from google.cloud.datastore._generated.datastore_pb2 import LookupResponse -from google.cloud.datastore._generated.datastore_pb2 import Mutation -from google.cloud.datastore._generated.datastore_pb2 import MutationResult -from google.cloud.datastore._generated.datastore_pb2 import ReadOptions -from google.cloud.datastore._generated.datastore_pb2 import RollbackRequest -from google.cloud.datastore._generated.datastore_pb2 import RollbackResponse -from google.cloud.datastore._generated.datastore_pb2 import RunQueryRequest -from google.cloud.datastore._generated.datastore_pb2 import RunQueryResponse -# END: Imports from datastore_pb2 -import grpc -from grpc.beta import implementations as beta_implementations -from grpc.beta import interfaces as beta_interfaces -from grpc.framework.common import cardinality -from grpc.framework.interfaces.face import utilities as face_utilities - - -class DatastoreStub(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
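-
-        A minimal usage sketch (illustrative only; the channel target and
-        project ID below are assumptions, not part of this generated module):
-
-            channel = grpc.insecure_channel('datastore.example.com:443')
-            stub = DatastoreStub(channel)
-            response = stub.Lookup(LookupRequest(project_id='my-project'))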
- """ - self.Lookup = channel.unary_unary( - '/google.datastore.v1.Datastore/Lookup', - request_serializer=LookupRequest.SerializeToString, - response_deserializer=LookupResponse.FromString, - ) - self.RunQuery = channel.unary_unary( - '/google.datastore.v1.Datastore/RunQuery', - request_serializer=RunQueryRequest.SerializeToString, - response_deserializer=RunQueryResponse.FromString, - ) - self.BeginTransaction = channel.unary_unary( - '/google.datastore.v1.Datastore/BeginTransaction', - request_serializer=BeginTransactionRequest.SerializeToString, - response_deserializer=BeginTransactionResponse.FromString, - ) - self.Commit = channel.unary_unary( - '/google.datastore.v1.Datastore/Commit', - request_serializer=CommitRequest.SerializeToString, - response_deserializer=CommitResponse.FromString, - ) - self.Rollback = channel.unary_unary( - '/google.datastore.v1.Datastore/Rollback', - request_serializer=RollbackRequest.SerializeToString, - response_deserializer=RollbackResponse.FromString, - ) - self.AllocateIds = channel.unary_unary( - '/google.datastore.v1.Datastore/AllocateIds', - request_serializer=AllocateIdsRequest.SerializeToString, - response_deserializer=AllocateIdsResponse.FromString, - ) - - -class DatastoreServicer(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - - def Lookup(self, request, context): - """Looks up entities by key. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RunQuery(self, request, context): - """Queries for entities. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def BeginTransaction(self, request, context): - """Begins a new transaction. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Commit(self, request, context): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Rollback(self, request, context): - """Rolls back a transaction. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AllocateIds(self, request, context): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_DatastoreServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Lookup': grpc.unary_unary_rpc_method_handler( - servicer.Lookup, - request_deserializer=LookupRequest.FromString, - response_serializer=LookupResponse.SerializeToString, - ), - 'RunQuery': grpc.unary_unary_rpc_method_handler( - servicer.RunQuery, - request_deserializer=RunQueryRequest.FromString, - response_serializer=RunQueryResponse.SerializeToString, - ), - 'BeginTransaction': grpc.unary_unary_rpc_method_handler( - servicer.BeginTransaction, - request_deserializer=BeginTransactionRequest.FromString, - response_serializer=BeginTransactionResponse.SerializeToString, - ), - 'Commit': grpc.unary_unary_rpc_method_handler( - servicer.Commit, - request_deserializer=CommitRequest.FromString, - response_serializer=CommitResponse.SerializeToString, - ), - 'Rollback': grpc.unary_unary_rpc_method_handler( - servicer.Rollback, - request_deserializer=RollbackRequest.FromString, - response_serializer=RollbackResponse.SerializeToString, - ), - 'AllocateIds': grpc.unary_unary_rpc_method_handler( - servicer.AllocateIds, - request_deserializer=AllocateIdsRequest.FromString, - response_serializer=AllocateIdsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.datastore.v1.Datastore', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - -class BetaDatastoreServicer(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - def Lookup(self, request, context): - """Looks up entities by key. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def RunQuery(self, request, context): - """Queries for entities. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def BeginTransaction(self, request, context): - """Begins a new transaction. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Commit(self, request, context): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Rollback(self, request, context): - """Rolls back a transaction. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def AllocateIds(self, request, context): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - -class BetaDatastoreStub(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - def Lookup(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Looks up entities by key. 
- """ - raise NotImplementedError() - Lookup.future = None - def RunQuery(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Queries for entities. - """ - raise NotImplementedError() - RunQuery.future = None - def BeginTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Begins a new transaction. - """ - raise NotImplementedError() - BeginTransaction.future = None - def Commit(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - raise NotImplementedError() - Commit.future = None - def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Rolls back a transaction. - """ - raise NotImplementedError() - Rollback.future = None - def AllocateIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ - raise NotImplementedError() - AllocateIds.future = None - - -def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - request_deserializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.FromString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.FromString, - ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.FromString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.FromString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.FromString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.FromString, - } - response_serializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.SerializeToString, - } - method_implementations = { - ('google.datastore.v1.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), - ('google.datastore.v1.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), - ('google.datastore.v1.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), - ('google.datastore.v1.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), - ('google.datastore.v1.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), - ('google.datastore.v1.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - -def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - request_serializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.SerializeToString, 
- ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.SerializeToString, - } - response_deserializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.FromString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.FromString, - ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.FromString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.FromString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.FromString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.FromString, - } - cardinalities = { - 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, - 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, - 'Commit': cardinality.Cardinality.UNARY_UNARY, - 'Lookup': cardinality.Cardinality.UNARY_UNARY, - 'Rollback': cardinality.Cardinality.UNARY_UNARY, - 'RunQuery': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.datastore.v1.Datastore', cardinalities, options=stub_options) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py deleted file mode 100644 index f7a321a5c6b8..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/datastore_pb2.py +++ /dev/null @@ -1,891 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/datastore/v1/datastore.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1_dot_entity__pb2 -from google.cloud.datastore._generated import query_pb2 as google_dot_datastore_dot_v1_dot_query__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/datastore/v1/datastore.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n#google/datastore/v1/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a google/datastore/v1/entity.proto\x1a\x1fgoogle/datastore/v1/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"-\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 
\x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type2\xdb\x06\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*B+\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01\x62\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_entity__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_query__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - - -_COMMITREQUEST_MODE = _descriptor.EnumDescriptor( - name='Mode', - full_name='google.datastore.v1.CommitRequest.Mode', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='MODE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='TRANSACTIONAL', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='NON_TRANSACTIONAL', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1178, - serialized_end=1248, -) -_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE) - -_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor( - name='ReadConsistency', - full_name='google.datastore.v1.ReadOptions.ReadConsistency', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='READ_CONSISTENCY_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='STRONG', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='EVENTUAL', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=2007, - serialized_end=2084, -) -_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY) - - -_LOOKUPREQUEST = 
_descriptor.Descriptor( - name='LookupRequest', - full_name='google.datastore.v1.LookupRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.LookupRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='read_options', full_name='google.datastore.v1.LookupRequest.read_options', index=1, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='keys', full_name='google.datastore.v1.LookupRequest.keys', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=158, - serialized_end=289, -) - - -_LOOKUPRESPONSE = _descriptor.Descriptor( - name='LookupResponse', - full_name='google.datastore.v1.LookupResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='found', full_name='google.datastore.v1.LookupResponse.found', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='missing', full_name='google.datastore.v1.LookupResponse.missing', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='deferred', full_name='google.datastore.v1.LookupResponse.deferred', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=292, - serialized_end=454, -) - - -_RUNQUERYREQUEST = _descriptor.Descriptor( - name='RunQueryRequest', - full_name='google.datastore.v1.RunQueryRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.RunQueryRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='partition_id', full_name='google.datastore.v1.RunQueryRequest.partition_id', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, 
- is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='read_options', full_name='google.datastore.v1.RunQueryRequest.read_options', index=2, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='query', full_name='google.datastore.v1.RunQueryRequest.query', index=3, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='gql_query', full_name='google.datastore.v1.RunQueryRequest.gql_query', index=4, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='query_type', full_name='google.datastore.v1.RunQueryRequest.query_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=457, - serialized_end=717, -) - - -_RUNQUERYRESPONSE = _descriptor.Descriptor( - name='RunQueryResponse', - full_name='google.datastore.v1.RunQueryResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='batch', full_name='google.datastore.v1.RunQueryResponse.batch', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='query', full_name='google.datastore.v1.RunQueryResponse.query', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=719, - serialized_end=834, -) - - -_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( - name='BeginTransactionRequest', - full_name='google.datastore.v1.BeginTransactionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.BeginTransactionRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=836, - serialized_end=881, -) - - -_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( - name='BeginTransactionResponse', - full_name='google.datastore.v1.BeginTransactionResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='transaction', 
full_name='google.datastore.v1.BeginTransactionResponse.transaction', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=883, - serialized_end=930, -) - - -_ROLLBACKREQUEST = _descriptor.Descriptor( - name='RollbackRequest', - full_name='google.datastore.v1.RollbackRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.RollbackRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.datastore.v1.RollbackRequest.transaction', index=1, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=932, - serialized_end=990, -) - - -_ROLLBACKRESPONSE = _descriptor.Descriptor( - name='RollbackResponse', - full_name='google.datastore.v1.RollbackResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=992, - serialized_end=1010, -) - - -_COMMITREQUEST = _descriptor.Descriptor( - name='CommitRequest', - full_name='google.datastore.v1.CommitRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.CommitRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='mode', full_name='google.datastore.v1.CommitRequest.mode', index=1, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.datastore.v1.CommitRequest.transaction', index=2, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='mutations', full_name='google.datastore.v1.CommitRequest.mutations', index=3, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - 
enum_types=[ - _COMMITREQUEST_MODE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='transaction_selector', full_name='google.datastore.v1.CommitRequest.transaction_selector', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1013, - serialized_end=1272, -) - - -_COMMITRESPONSE = _descriptor.Descriptor( - name='CommitResponse', - full_name='google.datastore.v1.CommitResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='mutation_results', full_name='google.datastore.v1.CommitResponse.mutation_results', index=0, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='index_updates', full_name='google.datastore.v1.CommitResponse.index_updates', index=1, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1274, - serialized_end=1376, -) - - -_ALLOCATEIDSREQUEST = _descriptor.Descriptor( - name='AllocateIdsRequest', - full_name='google.datastore.v1.AllocateIdsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.AllocateIdsRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='keys', full_name='google.datastore.v1.AllocateIdsRequest.keys', index=1, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1378, - serialized_end=1458, -) - - -_ALLOCATEIDSRESPONSE = _descriptor.Descriptor( - name='AllocateIdsResponse', - full_name='google.datastore.v1.AllocateIdsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='keys', full_name='google.datastore.v1.AllocateIdsResponse.keys', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1460, - serialized_end=1521, -) - - -_MUTATION = _descriptor.Descriptor( - name='Mutation', - full_name='google.datastore.v1.Mutation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='insert', full_name='google.datastore.v1.Mutation.insert', index=0, - 
number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='update', full_name='google.datastore.v1.Mutation.update', index=1, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='upsert', full_name='google.datastore.v1.Mutation.upsert', index=2, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='delete', full_name='google.datastore.v1.Mutation.delete', index=3, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='base_version', full_name='google.datastore.v1.Mutation.base_version', index=4, - number=8, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='operation', full_name='google.datastore.v1.Mutation.operation', - index=0, containing_type=None, fields=[]), - _descriptor.OneofDescriptor( - name='conflict_detection_strategy', full_name='google.datastore.v1.Mutation.conflict_detection_strategy', - index=1, containing_type=None, fields=[]), - ], - serialized_start=1524, - serialized_end=1787, -) - - -_MUTATIONRESULT = _descriptor.Descriptor( - name='MutationResult', - full_name='google.datastore.v1.MutationResult', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.MutationResult.key', index=0, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='version', full_name='google.datastore.v1.MutationResult.version', index=1, - number=4, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='conflict_detected', full_name='google.datastore.v1.MutationResult.conflict_detected', index=2, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1789, - serialized_end=1888, -) - - -_READOPTIONS = _descriptor.Descriptor( - name='ReadOptions', - full_name='google.datastore.v1.ReadOptions', - filename=None, - file=DESCRIPTOR, 
- containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='read_consistency', full_name='google.datastore.v1.ReadOptions.read_consistency', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.datastore.v1.ReadOptions.transaction', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _READOPTIONS_READCONSISTENCY, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='consistency_type', full_name='google.datastore.v1.ReadOptions.consistency_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1891, - serialized_end=2104, -) - -_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS -_LOOKUPREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_LOOKUPRESPONSE.fields_by_name['found'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._ENTITYRESULT -_LOOKUPRESPONSE.fields_by_name['missing'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._ENTITYRESULT -_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._PARTITIONID -_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS -_RUNQUERYREQUEST.fields_by_name['query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERY -_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._GQLQUERY -_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( - _RUNQUERYREQUEST.fields_by_name['query']) -_RUNQUERYREQUEST.fields_by_name['query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] -_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( - _RUNQUERYREQUEST.fields_by_name['gql_query']) -_RUNQUERYREQUEST.fields_by_name['gql_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] -_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERYRESULTBATCH -_RUNQUERYRESPONSE.fields_by_name['query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERY -_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE -_COMMITREQUEST.fields_by_name['mutations'].message_type = _MUTATION -_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST -_COMMITREQUEST.oneofs_by_name['transaction_selector'].fields.append( - _COMMITREQUEST.fields_by_name['transaction']) -_COMMITREQUEST.fields_by_name['transaction'].containing_oneof = _COMMITREQUEST.oneofs_by_name['transaction_selector'] -_COMMITRESPONSE.fields_by_name['mutation_results'].message_type = _MUTATIONRESULT -_ALLOCATEIDSREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_ALLOCATEIDSRESPONSE.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_MUTATION.fields_by_name['insert'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY 
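The assignments in the block above re-link the flat descriptors defined earlier in the module: message-typed fields receive their message_type, enum fields their enum_type, and each oneof collects its member fields and marks them with containing_oneof. A minimal sketch of what that wiring provides at runtime, assuming the module is imported from the pre-move path this patch touches (illustrative only; not part of the generated file):

from google.cloud.datastore._generated import datastore_pb2

# 'example-project' and the transaction bytes below are placeholders.
req = datastore_pb2.CommitRequest(project_id='example-project')
req.transaction = b'opaque-txn-id'  # populates the transaction_selector oneof
assert req.WhichOneof('transaction_selector') == 'transaction'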
-_MUTATION.fields_by_name['update'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY -_MUTATION.fields_by_name['upsert'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY -_MUTATION.fields_by_name['delete'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['insert']) -_MUTATION.fields_by_name['insert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['update']) -_MUTATION.fields_by_name['update'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['upsert']) -_MUTATION.fields_by_name['upsert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['delete']) -_MUTATION.fields_by_name['delete'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['conflict_detection_strategy'].fields.append( - _MUTATION.fields_by_name['base_version']) -_MUTATION.fields_by_name['base_version'].containing_oneof = _MUTATION.oneofs_by_name['conflict_detection_strategy'] -_MUTATIONRESULT.fields_by_name['key'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY -_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS -_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( - _READOPTIONS.fields_by_name['read_consistency']) -_READOPTIONS.fields_by_name['read_consistency'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] -_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( - _READOPTIONS.fields_by_name['transaction']) -_READOPTIONS.fields_by_name['transaction'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] -DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST -DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE -DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST -DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE -DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST -DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE -DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST -DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE -DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST -DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE -DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST -DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE -DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION -DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT -DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS - -LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict( - DESCRIPTOR = _LOOKUPREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.LookupRequest) - )) -_sym_db.RegisterMessage(LookupRequest) - -LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict( - DESCRIPTOR = _LOOKUPRESPONSE, - __module__ = 
'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.LookupResponse) - )) -_sym_db.RegisterMessage(LookupResponse) - -RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict( - DESCRIPTOR = _RUNQUERYREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RunQueryRequest) - )) -_sym_db.RegisterMessage(RunQueryRequest) - -RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict( - DESCRIPTOR = _RUNQUERYRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RunQueryResponse) - )) -_sym_db.RegisterMessage(RunQueryResponse) - -BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict( - DESCRIPTOR = _BEGINTRANSACTIONREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.BeginTransactionRequest) - )) -_sym_db.RegisterMessage(BeginTransactionRequest) - -BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict( - DESCRIPTOR = _BEGINTRANSACTIONRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.BeginTransactionResponse) - )) -_sym_db.RegisterMessage(BeginTransactionResponse) - -RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict( - DESCRIPTOR = _ROLLBACKREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RollbackRequest) - )) -_sym_db.RegisterMessage(RollbackRequest) - -RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict( - DESCRIPTOR = _ROLLBACKRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RollbackResponse) - )) -_sym_db.RegisterMessage(RollbackResponse) - -CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict( - DESCRIPTOR = _COMMITREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.CommitRequest) - )) -_sym_db.RegisterMessage(CommitRequest) - -CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict( - DESCRIPTOR = _COMMITRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.CommitResponse) - )) -_sym_db.RegisterMessage(CommitResponse) - -AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict( - DESCRIPTOR = _ALLOCATEIDSREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.AllocateIdsRequest) - )) -_sym_db.RegisterMessage(AllocateIdsRequest) - -AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict( - DESCRIPTOR = _ALLOCATEIDSRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.AllocateIdsResponse) - )) -_sym_db.RegisterMessage(AllocateIdsResponse) - -Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict( - DESCRIPTOR = 
_MUTATION, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Mutation) - )) -_sym_db.RegisterMessage(Mutation) - -MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict( - DESCRIPTOR = _MUTATIONRESULT, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.MutationResult) - )) -_sym_db.RegisterMessage(MutationResult) - -ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict( - DESCRIPTOR = _READOPTIONS, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.ReadOptions) - )) -_sym_db.RegisterMessage(ReadOptions) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\016DatastoreProtoP\001')) -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py deleted file mode 100644 index 44d530bdb74a..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/entity_pb2.py +++ /dev/null @@ -1,495 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/datastore/v1/entity.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/datastore/v1/entity.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n google/datastore/v1/entity.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xb7\x01\n\x03Key\x12\x36\n\x0cpartition_id\x18\x01 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x32\n\x04path\x18\x02 \x03(\x0b\x32$.google.datastore.v1.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"8\n\nArrayValue\x12*\n\x06values\x18\x01 \x03(\x0b\x32\x1a.google.datastore.v1.Value\"\xf1\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\tkey_value\x18\x05 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 
\x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x33\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12\x36\n\x0b\x61rray_value\x18\t \x01(\x0b\x32\x1f.google.datastore.v1.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xbf\x01\n\x06\x45ntity\x12%\n\x03key\x18\x01 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12?\n\nproperties\x18\x03 \x03(\x0b\x32+.google.datastore.v1.Entity.PropertiesEntry\x1aM\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Value:\x02\x38\x01\x42(\n\x17\x63om.google.datastore.v1B\x0b\x45ntityProtoP\x01\x62\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - - - -_PARTITIONID = _descriptor.Descriptor( - name='PartitionId', - full_name='google.datastore.v1.PartitionId', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.PartitionId.project_id', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='namespace_id', full_name='google.datastore.v1.PartitionId.namespace_id', index=1, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=176, - serialized_end=231, -) - - -_KEY_PATHELEMENT = _descriptor.Descriptor( - name='PathElement', - full_name='google.datastore.v1.Key.PathElement', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='kind', full_name='google.datastore.v1.Key.PathElement.kind', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='id', full_name='google.datastore.v1.Key.PathElement.id', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='name', full_name='google.datastore.v1.Key.PathElement.name', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='id_type', 
full_name='google.datastore.v1.Key.PathElement.id_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=349, - serialized_end=417, -) - -_KEY = _descriptor.Descriptor( - name='Key', - full_name='google.datastore.v1.Key', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='partition_id', full_name='google.datastore.v1.Key.partition_id', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='path', full_name='google.datastore.v1.Key.path', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_KEY_PATHELEMENT, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=234, - serialized_end=417, -) - - -_ARRAYVALUE = _descriptor.Descriptor( - name='ArrayValue', - full_name='google.datastore.v1.ArrayValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='values', full_name='google.datastore.v1.ArrayValue.values', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=419, - serialized_end=475, -) - - -_VALUE = _descriptor.Descriptor( - name='Value', - full_name='google.datastore.v1.Value', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='null_value', full_name='google.datastore.v1.Value.null_value', index=0, - number=11, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='boolean_value', full_name='google.datastore.v1.Value.boolean_value', index=1, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='integer_value', full_name='google.datastore.v1.Value.integer_value', index=2, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.datastore.v1.Value.double_value', index=3, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timestamp_value', full_name='google.datastore.v1.Value.timestamp_value', index=4, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key_value', full_name='google.datastore.v1.Value.key_value', index=5, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.datastore.v1.Value.string_value', index=6, - number=17, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='blob_value', full_name='google.datastore.v1.Value.blob_value', index=7, - number=18, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='geo_point_value', full_name='google.datastore.v1.Value.geo_point_value', index=8, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='entity_value', full_name='google.datastore.v1.Value.entity_value', index=9, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='array_value', full_name='google.datastore.v1.Value.array_value', index=10, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='meaning', full_name='google.datastore.v1.Value.meaning', index=11, - number=14, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='exclude_from_indexes', full_name='google.datastore.v1.Value.exclude_from_indexes', index=12, - number=19, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value_type', full_name='google.datastore.v1.Value.value_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=478, - serialized_end=975, -) - - -_ENTITY_PROPERTIESENTRY = _descriptor.Descriptor( - name='PropertiesEntry', - full_name='google.datastore.v1.Entity.PropertiesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.Entity.PropertiesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.Entity.PropertiesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1092, - serialized_end=1169, -) - -_ENTITY = _descriptor.Descriptor( - name='Entity', - full_name='google.datastore.v1.Entity', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.Entity.key', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='properties', full_name='google.datastore.v1.Entity.properties', index=1, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_ENTITY_PROPERTIESENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=978, - serialized_end=1169, -) - -_KEY_PATHELEMENT.containing_type = _KEY -_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( - _KEY_PATHELEMENT.fields_by_name['id']) -_KEY_PATHELEMENT.fields_by_name['id'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] -_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( - _KEY_PATHELEMENT.fields_by_name['name']) -_KEY_PATHELEMENT.fields_by_name['name'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] -_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID -_KEY.fields_by_name['path'].message_type = _KEY_PATHELEMENT -_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE -_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE -_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VALUE.fields_by_name['key_value'].message_type = _KEY -_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG -_VALUE.fields_by_name['entity_value'].message_type = _ENTITY -_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['null_value']) -_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['boolean_value']) -_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['integer_value']) -_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['double_value']) 
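The same wiring pattern repeats for entity.proto: Key.PathElement's id_type oneof makes id and name mutually exclusive, and Value's value_type oneof does the same for the typed value fields. A short illustrative sketch (again assuming the pre-move import path; not part of the generated output):

from google.cloud.datastore._generated import entity_pb2

value = entity_pb2.Value()
value.integer_value = 42
value.double_value = 2.5  # assigning another member switches the active oneof field
assert value.WhichOneof('value_type') == 'double_value'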
-_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['timestamp_value']) -_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['key_value']) -_VALUE.fields_by_name['key_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['string_value']) -_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['blob_value']) -_VALUE.fields_by_name['blob_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['geo_point_value']) -_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['entity_value']) -_VALUE.fields_by_name['entity_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['array_value']) -_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_ENTITY_PROPERTIESENTRY.fields_by_name['value'].message_type = _VALUE -_ENTITY_PROPERTIESENTRY.containing_type = _ENTITY -_ENTITY.fields_by_name['key'].message_type = _KEY -_ENTITY.fields_by_name['properties'].message_type = _ENTITY_PROPERTIESENTRY -DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID -DESCRIPTOR.message_types_by_name['Key'] = _KEY -DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE -DESCRIPTOR.message_types_by_name['Value'] = _VALUE -DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY - -PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict( - DESCRIPTOR = _PARTITIONID, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PartitionId) - )) -_sym_db.RegisterMessage(PartitionId) - -Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict( - - PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict( - DESCRIPTOR = _KEY_PATHELEMENT, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Key.PathElement) - )) - , - DESCRIPTOR = _KEY, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Key) - )) -_sym_db.RegisterMessage(Key) -_sym_db.RegisterMessage(Key.PathElement) - -ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict( - DESCRIPTOR = _ARRAYVALUE, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.ArrayValue) - )) -_sym_db.RegisterMessage(ArrayValue) - -Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( - DESCRIPTOR = _VALUE, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Value) - )) -_sym_db.RegisterMessage(Value) - -Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( - - PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict( - 
DESCRIPTOR = _ENTITY_PROPERTIESENTRY, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Entity.PropertiesEntry) - )) - , - DESCRIPTOR = _ENTITY, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Entity) - )) -_sym_db.RegisterMessage(Entity) -_sym_db.RegisterMessage(Entity.PropertiesEntry) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\013EntityProtoP\001')) -_ENTITY_PROPERTIESENTRY.has_options = True -_ENTITY_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py deleted file mode 100644 index 7569f225d53a..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_generated/query_pb2.py +++ /dev/null @@ -1,934 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/datastore/v1/query.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1_dot_entity__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/datastore/v1/query.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n\x1fgoogle/datastore/v1/query.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a google/datastore/v1/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x18google/type/latlng.proto\"\xaf\x01\n\x0c\x45ntityResult\x12+\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x1b.google.datastore.v1.Entity\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"Q\n\nResultType\x12\x1b\n\x17RESULT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xf2\x02\n\x05Query\x12\x33\n\nprojection\x18\x02 \x03(\x0b\x32\x1f.google.datastore.v1.Projection\x12\x31\n\x04kind\x18\x03 \x03(\x0b\x32#.google.datastore.v1.KindExpression\x12+\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.Filter\x12\x31\n\x05order\x18\x05 \x03(\x0b\x32\".google.datastore.v1.PropertyOrder\x12;\n\x0b\x64istinct_on\x18\x06 \x03(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x0e\n\x06offset\x18\n \x01(\x05\x12*\n\x05limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x01(\t\"F\n\nProjection\x12\x38\n\x08property\x18\x01 
\x01(\x0b\x32&.google.datastore.v1.PropertyReference\"\xd1\x01\n\rPropertyOrder\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12?\n\tdirection\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyOrder.Direction\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x99\x01\n\x06\x46ilter\x12@\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32$.google.datastore.v1.CompositeFilterH\x00\x12>\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\'\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01\x62\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_entity__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - - -_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor( - name='ResultType', - full_name='google.datastore.v1.EntityResult.ResultType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='RESULT_TYPE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='FULL', index=1, number=1, - options=None, - type=None), - 
_descriptor.EnumValueDescriptor( - name='PROJECTION', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='KEY_ONLY', index=3, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=273, - serialized_end=354, -) -_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE) - -_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor( - name='Direction', - full_name='google.datastore.v1.PropertyOrder.Direction', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='DIRECTION_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ASCENDING', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DESCENDING', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1009, - serialized_end=1078, -) -_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION) - -_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.datastore.v1.CompositeFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='AND', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1361, - serialized_end=1406, -) -_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR) - -_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.datastore.v1.PropertyFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='LESS_THAN', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='LESS_THAN_OR_EQUAL', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='GREATER_THAN', index=3, number=3, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='GREATER_THAN_OR_EQUAL', index=4, number=4, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='EQUAL', index=5, number=5, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='HAS_ANCESTOR', index=6, number=11, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1587, - serialized_end=1736, -) -_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR) - -_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor( - name='MoreResultsType', - full_name='google.datastore.v1.QueryResultBatch.MoreResultsType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='MORE_RESULTS_TYPE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='NOT_FINISHED', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MORE_RESULTS_AFTER_LIMIT', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MORE_RESULTS_AFTER_CURSOR', index=3, number=4, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='NO_MORE_RESULTS', index=4, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=2463, - serialized_end=2615, 
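Each EnumDescriptor registered here is also surfaced as class-level constants on its containing generated message, which is how calling code typically compares wire values. A hedged sketch, assuming the pre-move import path:

from google.cloud.datastore._generated import query_pb2

batch_state = query_pb2.QueryResultBatch.NOT_FINISHED  # == 1
filter_op = query_pb2.PropertyFilter.EQUAL             # == 5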
-) -_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE) - - -_ENTITYRESULT = _descriptor.Descriptor( - name='EntityResult', - full_name='google.datastore.v1.EntityResult', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entity', full_name='google.datastore.v1.EntityResult.entity', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='version', full_name='google.datastore.v1.EntityResult.version', index=1, - number=4, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='cursor', full_name='google.datastore.v1.EntityResult.cursor', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _ENTITYRESULT_RESULTTYPE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=179, - serialized_end=354, -) - - -_QUERY = _descriptor.Descriptor( - name='Query', - full_name='google.datastore.v1.Query', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='projection', full_name='google.datastore.v1.Query.projection', index=0, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='kind', full_name='google.datastore.v1.Query.kind', index=1, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='filter', full_name='google.datastore.v1.Query.filter', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='order', full_name='google.datastore.v1.Query.order', index=3, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='distinct_on', full_name='google.datastore.v1.Query.distinct_on', index=4, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='start_cursor', full_name='google.datastore.v1.Query.start_cursor', index=5, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - 
name='end_cursor', full_name='google.datastore.v1.Query.end_cursor', index=6, - number=8, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='offset', full_name='google.datastore.v1.Query.offset', index=7, - number=10, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='limit', full_name='google.datastore.v1.Query.limit', index=8, - number=12, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=357, - serialized_end=727, -) - - -_KINDEXPRESSION = _descriptor.Descriptor( - name='KindExpression', - full_name='google.datastore.v1.KindExpression', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.datastore.v1.KindExpression.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=729, - serialized_end=759, -) - - -_PROPERTYREFERENCE = _descriptor.Descriptor( - name='PropertyReference', - full_name='google.datastore.v1.PropertyReference', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.datastore.v1.PropertyReference.name', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=761, - serialized_end=794, -) - - -_PROJECTION = _descriptor.Descriptor( - name='Projection', - full_name='google.datastore.v1.Projection', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='property', full_name='google.datastore.v1.Projection.property', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=796, - serialized_end=866, -) - - -_PROPERTYORDER = _descriptor.Descriptor( - name='PropertyOrder', - full_name='google.datastore.v1.PropertyOrder', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name='property', full_name='google.datastore.v1.PropertyOrder.property', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='direction', full_name='google.datastore.v1.PropertyOrder.direction', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _PROPERTYORDER_DIRECTION, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=869, - serialized_end=1078, -) - - -_FILTER = _descriptor.Descriptor( - name='Filter', - full_name='google.datastore.v1.Filter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='composite_filter', full_name='google.datastore.v1.Filter.composite_filter', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='property_filter', full_name='google.datastore.v1.Filter.property_filter', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='filter_type', full_name='google.datastore.v1.Filter.filter_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1081, - serialized_end=1234, -) - - -_COMPOSITEFILTER = _descriptor.Descriptor( - name='CompositeFilter', - full_name='google.datastore.v1.CompositeFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='op', full_name='google.datastore.v1.CompositeFilter.op', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='filters', full_name='google.datastore.v1.CompositeFilter.filters', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _COMPOSITEFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1237, - serialized_end=1406, -) - - -_PROPERTYFILTER = _descriptor.Descriptor( - name='PropertyFilter', - full_name='google.datastore.v1.PropertyFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='property', full_name='google.datastore.v1.PropertyFilter.property', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='op', full_name='google.datastore.v1.PropertyFilter.op', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.PropertyFilter.value', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _PROPERTYFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1409, - serialized_end=1736, -) - - -_GQLQUERY_NAMEDBINDINGSENTRY = _descriptor.Descriptor( - name='NamedBindingsEntry', - full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1940, - serialized_end=2032, -) - -_GQLQUERY = _descriptor.Descriptor( - name='GqlQuery', - full_name='google.datastore.v1.GqlQuery', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='query_string', full_name='google.datastore.v1.GqlQuery.query_string', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='allow_literals', full_name='google.datastore.v1.GqlQuery.allow_literals', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='named_bindings', full_name='google.datastore.v1.GqlQuery.named_bindings', index=2, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='positional_bindings', full_name='google.datastore.v1.GqlQuery.positional_bindings', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_GQLQUERY_NAMEDBINDINGSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1739, - serialized_end=2032, -) - - -_GQLQUERYPARAMETER = _descriptor.Descriptor( - name='GqlQueryParameter', - full_name='google.datastore.v1.GqlQueryParameter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.GqlQueryParameter.value', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='cursor', full_name='google.datastore.v1.GqlQueryParameter.cursor', index=1, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='parameter_type', full_name='google.datastore.v1.GqlQueryParameter.parameter_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=2034, - serialized_end=2134, -) - - -_QUERYRESULTBATCH = _descriptor.Descriptor( - name='QueryResultBatch', - full_name='google.datastore.v1.QueryResultBatch', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='skipped_results', full_name='google.datastore.v1.QueryResultBatch.skipped_results', index=0, - number=6, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='skipped_cursor', full_name='google.datastore.v1.QueryResultBatch.skipped_cursor', index=1, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='entity_result_type', full_name='google.datastore.v1.QueryResultBatch.entity_result_type', index=2, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='entity_results', full_name='google.datastore.v1.QueryResultBatch.entity_results', index=3, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='end_cursor', full_name='google.datastore.v1.QueryResultBatch.end_cursor', index=4, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='more_results', 
full_name='google.datastore.v1.QueryResultBatch.more_results', index=5, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='snapshot_version', full_name='google.datastore.v1.QueryResultBatch.snapshot_version', index=6, - number=7, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _QUERYRESULTBATCH_MORERESULTSTYPE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2137, - serialized_end=2615, -) - -_ENTITYRESULT.fields_by_name['entity'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY -_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT -_QUERY.fields_by_name['projection'].message_type = _PROJECTION -_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION -_QUERY.fields_by_name['filter'].message_type = _FILTER -_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER -_QUERY.fields_by_name['distinct_on'].message_type = _PROPERTYREFERENCE -_QUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE -_PROJECTION.fields_by_name['property'].message_type = _PROPERTYREFERENCE -_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE -_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION -_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER -_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER -_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER -_FILTER.oneofs_by_name['filter_type'].fields.append( - _FILTER.fields_by_name['composite_filter']) -_FILTER.fields_by_name['composite_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] -_FILTER.oneofs_by_name['filter_type'].fields.append( - _FILTER.fields_by_name['property_filter']) -_FILTER.fields_by_name['property_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] -_COMPOSITEFILTER.fields_by_name['op'].enum_type = _COMPOSITEFILTER_OPERATOR -_COMPOSITEFILTER.fields_by_name['filters'].message_type = _FILTER -_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER -_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE -_PROPERTYFILTER.fields_by_name['op'].enum_type = _PROPERTYFILTER_OPERATOR -_PROPERTYFILTER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._VALUE -_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER -_GQLQUERY_NAMEDBINDINGSENTRY.fields_by_name['value'].message_type = _GQLQUERYPARAMETER -_GQLQUERY_NAMEDBINDINGSENTRY.containing_type = _GQLQUERY -_GQLQUERY.fields_by_name['named_bindings'].message_type = _GQLQUERY_NAMEDBINDINGSENTRY -_GQLQUERY.fields_by_name['positional_bindings'].message_type = _GQLQUERYPARAMETER -_GQLQUERYPARAMETER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._VALUE -_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( - _GQLQUERYPARAMETER.fields_by_name['value']) -_GQLQUERYPARAMETER.fields_by_name['value'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] -_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( - 
_GQLQUERYPARAMETER.fields_by_name['cursor']) -_GQLQUERYPARAMETER.fields_by_name['cursor'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] -_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE -_QUERYRESULTBATCH.fields_by_name['entity_results'].message_type = _ENTITYRESULT -_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE -_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH -DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT -DESCRIPTOR.message_types_by_name['Query'] = _QUERY -DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION -DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE -DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION -DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER -DESCRIPTOR.message_types_by_name['Filter'] = _FILTER -DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER -DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER -DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY -DESCRIPTOR.message_types_by_name['GqlQueryParameter'] = _GQLQUERYPARAMETER -DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH - -EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict( - DESCRIPTOR = _ENTITYRESULT, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.EntityResult) - )) -_sym_db.RegisterMessage(EntityResult) - -Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict( - DESCRIPTOR = _QUERY, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) - )) -_sym_db.RegisterMessage(Query) - -KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( - DESCRIPTOR = _KINDEXPRESSION, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) - )) -_sym_db.RegisterMessage(KindExpression) - -PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYREFERENCE, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) - )) -_sym_db.RegisterMessage(PropertyReference) - -Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( - DESCRIPTOR = _PROJECTION, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) - )) -_sym_db.RegisterMessage(Projection) - -PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYORDER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) - )) -_sym_db.RegisterMessage(PropertyOrder) - -Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( - DESCRIPTOR = _FILTER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) - )) -_sym_db.RegisterMessage(Filter) - -CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( - DESCRIPTOR = _COMPOSITEFILTER, - __module__ = 'google.datastore.v1.query_pb2' - # 
@@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) - )) -_sym_db.RegisterMessage(CompositeFilter) - -PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYFILTER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) - )) -_sym_db.RegisterMessage(PropertyFilter) - -GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( - - NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( - DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) - )) - , - DESCRIPTOR = _GQLQUERY, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) - )) -_sym_db.RegisterMessage(GqlQuery) -_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) - -GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( - DESCRIPTOR = _GQLQUERYPARAMETER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) - )) -_sym_db.RegisterMessage(GqlQueryParameter) - -QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( - DESCRIPTOR = _QUERYRESULTBATCH, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) - )) -_sym_db.RegisterMessage(QueryResultBatch) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\nQueryProtoP\001')) -_GQLQUERY_NAMEDBINDINGSENTRY.has_options = True -_GQLQUERY_NAMEDBINDINGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index b66626fabccb..a6bae476dff8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -25,14 +25,14 @@ from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_HOST from google.cloud import exceptions -from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 +from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2 try: from grpc import StatusCode - from google.cloud.datastore._generated import datastore_grpc_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc except ImportError: # pragma: NO COVER _GRPC_ERROR_MAPPING = {} _HAVE_GRPC = False - datastore_grpc_pb2 = None + datastore_pb2_grpc = None StatusCode = None else: # NOTE: We don't include OK -> 200 or CANCELLED -> 499 @@ -147,10 +147,10 @@ def lookup(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.LookupRequest` + :type request_pb: :class:`.datastore_pb2.LookupRequest` :param request_pb: The request protobuf object. 
- :rtype: :class:`._generated.datastore_pb2.LookupResponse` + :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'lookup', request_pb, @@ -163,10 +163,10 @@ def run_query(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RunQueryRequest` + :type request_pb: :class:`.datastore_pb2.RunQueryRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RunQueryResponse` + :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'runQuery', request_pb, @@ -180,10 +180,10 @@ def begin_transaction(self, project, request_pb): usually your project name in the cloud console. :type request_pb: - :class:`._generated.datastore_pb2.BeginTransactionRequest` + :class:`.datastore_pb2.BeginTransactionRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` + :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'beginTransaction', request_pb, @@ -196,10 +196,10 @@ def commit(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.CommitRequest` + :type request_pb: :class:`.datastore_pb2.CommitRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.CommitResponse` + :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'commit', request_pb, @@ -212,10 +212,10 @@ def rollback(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RollbackRequest` + :type request_pb: :class:`.datastore_pb2.RollbackRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RollbackResponse` + :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'rollback', request_pb, @@ -228,10 +228,10 @@ def allocate_ids(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.AllocateIdsRequest` + :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.AllocateIdsResponse` + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'allocateIds', request_pb, @@ -280,10 +280,10 @@ def __init__(self, connection, secure): if secure: self._stub = make_secure_stub(connection.credentials, connection.USER_AGENT, - datastore_grpc_pb2.DatastoreStub, + datastore_pb2_grpc.DatastoreStub, connection.host) else: - self._stub = make_insecure_stub(datastore_grpc_pb2.DatastoreStub, + self._stub = make_insecure_stub(datastore_pb2_grpc.DatastoreStub, connection.host) def lookup(self, project, request_pb): @@ -293,10 +293,10 @@ def lookup(self, project, request_pb): :param project: The project to connect to. 
This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.LookupRequest` + :type request_pb: :class:`.datastore_pb2.LookupRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.LookupResponse` + :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -310,10 +310,10 @@ def run_query(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RunQueryRequest` + :type request_pb: :class:`.datastore_pb2.RunQueryRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RunQueryResponse` + :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -328,10 +328,10 @@ def begin_transaction(self, project, request_pb): usually your project name in the cloud console. :type request_pb: - :class:`._generated.datastore_pb2.BeginTransactionRequest` + :class:`.datastore_pb2.BeginTransactionRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` + :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -345,10 +345,10 @@ def commit(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.CommitRequest` + :type request_pb: :class:`.datastore_pb2.CommitRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.CommitResponse` + :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -362,10 +362,10 @@ def rollback(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RollbackRequest` + :type request_pb: :class:`.datastore_pb2.RollbackRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RollbackResponse` + :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -379,10 +379,10 @@ def allocate_ids(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.AllocateIdsRequest` + :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.AllocateIdsResponse` + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -469,8 +469,7 @@ def lookup(self, project, key_pbs, Maps the ``DatastoreService.Lookup`` protobuf RPC. This uses mostly protobufs - (:class:`google.cloud.datastore._generated.entity_pb2.Key` as input - and :class:`google.cloud.datastore._generated.entity_pb2.Entity` + (:class:`.entity_pb2.Key` as input and :class:`.entity_pb2.Entity` as output). 
It is used under the hood in :meth:`Client.get() <.datastore.client.Client.get>`: @@ -493,7 +492,7 @@ def lookup(self, project, key_pbs, :param project: The project to look up the keys in. :type key_pbs: list of - :class:`google.cloud.datastore._generated.entity_pb2.Key` + :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. :type eventual: bool @@ -509,9 +508,9 @@ def lookup(self, project, key_pbs, :rtype: tuple :returns: A triple of (``results``, ``missing``, ``deferred``) where both ``results`` and ``missing`` are lists of - :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :class:`.entity_pb2.Entity` and ``deferred`` is a list of - :class:`google.cloud.datastore._generated.entity_pb2.Key`. + :class:`.entity_pb2.Key`. """ lookup_request = _datastore_pb2.LookupRequest() _set_read_options(lookup_request, eventual, transaction_id) @@ -543,7 +542,7 @@ def run_query(self, project, query_pb, namespace=None, :type project: str :param project: The project over which to run the query. - :type query_pb: :class:`.datastore._generated.query_pb2.Query` + :type query_pb: :class:`.query_pb2.Query` :param query_pb: The Protobuf representing the query to run. :type namespace: str @@ -602,7 +601,7 @@ def commit(self, project, request, transaction_id): :type project: str :param project: The project to which the transaction applies. - :type request: :class:`._generated.datastore_pb2.CommitRequest` + :type request: :class:`.datastore_pb2.CommitRequest` :param request: The protobuf with the mutations being committed. :type transaction_id: str @@ -616,7 +615,7 @@ def commit(self, project, request, transaction_id): :rtype: tuple :returns: The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key + :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. """ if transaction_id: @@ -654,10 +653,10 @@ def allocate_ids(self, project, key_pbs): :param project: The project to which the transaction belongs. :type key_pbs: list of - :class:`google.cloud.datastore._generated.entity_pb2.Key` + :class:`.entity_pb2.Key` :param key_pbs: The keys for which the backend should allocate IDs. - :rtype: list of :class:`.datastore._generated.entity_pb2.Key` + :rtype: list of :class:`.entity_pb2.Key` :returns: An equal number of keys, with IDs filled in by the backend. """ request = _datastore_pb2.AllocateIdsRequest() @@ -691,7 +690,7 @@ def _add_keys_to_request(request_field_pb, key_pbs): :type request_field_pb: `RepeatedCompositeFieldContainer` :param request_field_pb: A repeated proto field that contains keys. - :type key_pbs: list of :class:`.datastore._generated.entity_pb2.Key` + :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to add to a request. """ for key_pb in key_pbs: @@ -701,12 +700,12 @@ def _add_keys_to_request(request_field_pb, key_pbs): def _parse_commit_response(commit_response_pb): """Extract response data from a commit response. - :type commit_response_pb: :class:`._generated.datastore_pb2.CommitResponse` + :type commit_response_pb: :class:`.datastore_pb2.CommitResponse` :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple :returns: The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key + :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. 
""" mut_results = commit_response_pb.mutation_results diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 2c09f357ee2e..00854d2007b6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -22,7 +22,7 @@ """ from google.cloud.datastore import helpers -from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 +from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2 class Batch(object): @@ -106,7 +106,7 @@ def namespace(self): def _add_partial_key_entity_pb(self): """Adds a new mutation for an entity with a partial key. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ @@ -116,7 +116,7 @@ def _add_partial_key_entity_pb(self): def _add_complete_key_entity_pb(self): """Adds a new mutation for an entity with a completed key. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ @@ -129,7 +129,7 @@ def _add_complete_key_entity_pb(self): def _add_delete_key_pb(self): """Adds a new mutation for a key to be deleted. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :rtype: :class:`.entity_pb2.Key` :returns: The newly created key protobuf that will be deleted when sent with a commit. """ @@ -147,7 +147,7 @@ def mutations(self): built-up so far. :rtype: iterable - :returns: The list of :class:`._generated.datastore_pb2.Mutation` + :returns: The list of :class:`.datastore_pb2.Mutation` protobufs to be sent in the commit request. """ return self._commit_request.mutations @@ -302,7 +302,7 @@ def _assign_entity_to_pb(entity_pb, entity): Helper method for ``Batch.put``. - :type entity_pb: :class:`._generated.entity_pb2.Entity` + :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: The entity owned by a mutation. :type entity: :class:`google.cloud.datastore.entity.Entity` diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 8473a08c6f65..42b0c6497f88 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -78,7 +78,7 @@ def _extended_lookup(connection, project, key_pbs, :type project: str :param project: The project to make the request for. - :type key_pbs: list of :class:`._generated.entity_pb2.Key` + :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. :type missing: list @@ -100,7 +100,7 @@ def _extended_lookup(connection, project, key_pbs, the given transaction. Incompatible with ``eventual==True``. - :rtype: list of :class:`._generated.entity_pb2.Entity` + :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not null or empty list. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 13723bdb0fa5..ced1b83f20e7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -26,7 +26,7 @@ from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_datetime -from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 +from google.cloud.grpc.datastore.v1 import entity_pb2 as _entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -36,7 +36,7 @@ def _get_meaning(value_pb, is_list=False): """Get the meaning from a protobuf value. - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The protobuf value to be checked for an associated meaning. @@ -77,13 +77,13 @@ def _get_meaning(value_pb, is_list=False): def _new_value_pb(entity_pb, name): """Add (by name) a new ``Value`` protobuf to an entity protobuf. - :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity` + :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: An entity protobuf to add a new property to. :type name: str :param name: The name of the new property. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :rtype: :class:`.entity_pb2.Value` :returns: The new ``Value`` protobuf that was added to the entity. """ return entity_pb.properties.get_or_create(name) @@ -92,7 +92,7 @@ def _new_value_pb(entity_pb, name): def _property_tuples(entity_pb): """Iterator of name, ``Value`` tuples from entity properties. - :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity` + :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: An entity protobuf to add a new property to. :rtype: :class:`generator` @@ -108,7 +108,7 @@ def entity_from_protobuf(pb): The protobuf should be one returned from the Cloud Datastore Protobuf API. - :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :type pb: :class:`.entity_pb2.Entity` :param pb: The Protobuf representing the entity. :rtype: :class:`google.cloud.datastore.entity.Entity` @@ -168,7 +168,7 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, :type value: object :param value: The current value stored as property ``name``. - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The protobuf value to add meaning / meanings to. :type is_list: bool @@ -203,7 +203,7 @@ def entity_to_protobuf(entity): :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity to be turned into a protobuf. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :rtype: :class:`.entity_pb2.Entity` :returns: The protobuf representing the entity. """ entity_pb = _entity_pb2.Entity() @@ -241,7 +241,7 @@ def key_from_protobuf(pb): The protobuf should be one returned from the Cloud Datastore Protobuf API. - :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :type pb: :class:`.entity_pb2.Key` :param pb: The Protobuf representing the key. 
:rtype: :class:`google.cloud.datastore.key.Key` @@ -339,7 +339,7 @@ def _get_value_from_value_pb(value_pb): Some work is done to coerce the return value into a more useful type (particularly in the case of a timestamp value, or a key value). - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The Value Protobuf. :rtype: object @@ -399,7 +399,7 @@ def _set_protobuf_value(value_pb, val): Some value types (entities, keys, lists) cannot be directly assigned; this function handles them correctly. - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The value protobuf to which the value is being assigned. :type val: :class:`datetime.datetime`, boolean, float, integer, string, diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index c33e590a2581..0af884c67301 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -17,7 +17,7 @@ import copy import six -from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 +from google.cloud.grpc.datastore.v1 import entity_pb2 as _entity_pb2 class Key(object): @@ -261,7 +261,7 @@ def completed_key(self, id_or_name): def to_protobuf(self): """Return a protobuf corresponding to the key. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :rtype: :class:`.entity_pb2.Key` :returns: The protobuf representing the key. """ key = _entity_pb2.Key() diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index a6f6c845e17f..e8989a41a9dd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -20,7 +20,7 @@ from google.cloud.iterator import Iterator as BaseIterator from google.cloud.iterator import Page -from google.cloud.datastore._generated import query_pb2 as _query_pb2 +from google.cloud.grpc.datastore.v1 import query_pb2 as _query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key @@ -417,7 +417,7 @@ def _build_protobuf(self): Relies on the current state of the iterator. :rtype: - :class:`google.cloud.datastore._generated.query_pb2.Query` + :class:`.query_pb2.Query` :returns: The query protobuf object for the current state of the iterator. """ @@ -452,7 +452,7 @@ def _process_query_results(self, entity_pbs, cursor_as_bytes, :param cursor_as_bytes: The end cursor of the query. :type more_results_enum: - :class:`._generated.query_pb2.QueryResultBatch.MoreResultsType` + :class:`.query_pb2.QueryResultBatch.MoreResultsType` :param more_results_enum: Enum indicating if there are more results. :type skipped_results: int @@ -508,7 +508,7 @@ def _pb_from_query(query): :type query: :class:`Query` :param query: The source query. - :rtype: :class:`google.cloud.datastore._generated.query_pb2.Query` + :rtype: :class:`.query_pb2.Query` :returns: A protobuf that can be sent to the protobuf API. N.b. that it does not contain "in-flight" fields for ongoing query executions (cursors, offset, limit). @@ -575,7 +575,7 @@ def _item_to_entity(iterator, entity_pb): :param iterator: The iterator that is currently in use. 
:type entity_pb: - :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :class:`.entity_pb2.Entity` :param entity_pb: An entity protobuf to convert to a native entity. :rtype: :class:`~google.cloud.datastore.entity.Entity` diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 3de3a0b91a84..8dee2f7bfc62 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -52,6 +52,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.22.1, < 0.23dev', 'grpcio >= 1.0.2, < 2.0dev', + 'gapic-google-cloud-datastore-v1 >= 0.14.0, < 0.15dev', ] setup( diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 35781cdf3a40..6515767bd0ca 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -223,7 +223,7 @@ def test_constructor(self): self.assertEqual(mock_args, [( conn.credentials, conn.USER_AGENT, - MUT.datastore_grpc_pb2.DatastoreStub, + MUT.datastore_pb2_grpc.DatastoreStub, conn.host, )]) @@ -242,7 +242,7 @@ def test_constructor_insecure(self): self.assertIs(datastore_api._stub, stub) self.assertEqual(mock_args, [( - MUT.datastore_grpc_pb2.DatastoreStub, + MUT.datastore_pb2_grpc.DatastoreStub, conn.host, )]) @@ -370,7 +370,7 @@ def _make_key_pb(self, project, id_=1234): return Key(*path_args, project=project).to_protobuf() def _make_query_pb(self, kind): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 pb = query_pb2.Query() pb.kind.add().name = kind return pb @@ -508,7 +508,7 @@ def test_build_api_url_w_explicit_base_version(self): URI) def test_lookup_single_key_empty_response(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) @@ -535,7 +535,7 @@ def test_lookup_single_key_empty_response(self): self.assertEqual(key_pb, keys[0]) def test_lookup_single_key_empty_response_w_eventual(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) @@ -574,7 +574,7 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): eventual=True, transaction_id=TRANSACTION) def test_lookup_single_key_empty_response_w_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' @@ -604,8 +604,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): self.assertEqual(request.read_options.transaction, TRANSACTION) def test_lookup_single_key_nonempty_response(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) @@ -636,7 +636,7 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(key_pb, keys[0]) def test_lookup_multiple_keys_empty_response(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) @@ -665,7 +665,7 @@ def 
test_lookup_multiple_keys_empty_response(self): self.assertEqual(key_pb2, keys[1]) def test_lookup_multiple_keys_w_missing(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) @@ -699,7 +699,7 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual(key_pb2, keys[1]) def test_lookup_multiple_keys_w_deferred(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) @@ -735,8 +735,8 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(key_pb2, keys[1]) def test_run_query_w_eventual_no_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -773,8 +773,8 @@ def test_run_query_w_eventual_no_transaction(self): self.assertEqual(request.read_options.transaction, b'') def test_run_query_wo_eventual_w_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -813,8 +813,8 @@ def test_run_query_wo_eventual_w_transaction(self): self.assertEqual(request.read_options.transaction, TRANSACTION) def test_run_query_w_eventual_and_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -831,8 +831,8 @@ def test_run_query_w_eventual_and_transaction(self): eventual=True, transaction_id=TRANSACTION) def test_run_query_wo_namespace_empty_result(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -865,8 +865,8 @@ def test_run_query_wo_namespace_empty_result(self): self.assertEqual(request.query, q_pb) def test_run_query_w_namespace_nonempty_result(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 PROJECT = 'PROJECT' KIND = 'Kind' @@ -895,7 +895,7 @@ def test_run_query_w_namespace_nonempty_result(self): self.assertEqual(request.query, q_pb) def test_begin_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' @@ -918,7 +918,7 @@ def test_begin_transaction(self): def test_commit_wo_transaction(self): import mock - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -966,7 +966,7 @@ def mock_parse(response): def test_commit_w_transaction(self): 
import mock - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -1013,7 +1013,7 @@ def mock_parse(response): self.assertEqual(_parsed, [rsp_pb]) def test_rollback_ok(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'xact' @@ -1035,7 +1035,7 @@ def test_rollback_ok(self): self.assertEqual(request.transaction, TRANSACTION) def test_allocate_ids_empty(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() @@ -1056,7 +1056,7 @@ def test_allocate_ids_empty(self): self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' before_key_pbs = [ @@ -1097,8 +1097,8 @@ def _call_fut(self, commit_response_pb): return _parse_commit_response(commit_response_pb) def test_it(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 index_updates = 1337 keys = [ diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index 0bdc8762e64c..7681a8fd9201 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -27,7 +27,7 @@ def _make_one(self, client): return self._get_target_class()(client) def test_ctor(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 _PROJECT = 'PROJECT' _NAMESPACE = 'NAMESPACE' connection = _Connection() @@ -416,7 +416,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out # key.partition_id.project_id = self.project diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index ef198b98b19a..67a0229870c0 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -23,7 +23,7 @@ def _make_credentials(): def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -275,7 +275,7 @@ def test_get_multi_miss(self): self.assertEqual(results, []) def test_get_multi_miss_w_missing(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.key import Key KIND = 'Kind' @@ -339,7 +339,7 @@ def test_get_multi_miss_w_deferred(self): [key.to_protobuf()]) def test_get_multi_w_deferred_from_backend_but_not_passed(self): - from google.cloud.datastore._generated import 
entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -1004,7 +1004,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out # key.partition_id.project_id = self.project diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py index f3fa3391bbb7..f3d144e6a591 100644 --- a/packages/google-cloud-datastore/unit_tests/test_helpers.py +++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -22,7 +22,7 @@ def _call_fut(self, entity_pb, name): return _new_value_pb(entity_pb, name) def test_it(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 entity_pb = entity_pb2.Entity() name = 'foo' @@ -41,7 +41,7 @@ def _call_fut(self, entity_pb): def test_it(self): import types - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -63,7 +63,7 @@ def _call_fut(self, val): return entity_from_protobuf(val) def test_it(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -109,7 +109,7 @@ def test_it(self): self.assertEqual(key.id, _ID) def test_mismatched_value_indexed(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -133,7 +133,7 @@ def test_mismatched_value_indexed(self): self._call_fut(entity_pb) def test_entity_no_key(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 entity_pb = entity_pb2.Entity() entity = self._call_fut(entity_pb) @@ -142,7 +142,7 @@ def test_entity_no_key(self): self.assertEqual(dict(entity), {}) def test_entity_with_meaning(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -157,7 +157,7 @@ def test_entity_with_meaning(self): self.assertEqual(entity._meanings, {name: (meaning, val)}) def test_nested_entity_no_key(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'FOO' @@ -214,7 +214,7 @@ def _compareEntityProto(self, entity_pb1, entity_pb2): self.assertEqual(val1, val2) def test_empty(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -222,7 +222,7 @@ def test_empty(self): self._compareEntityProto(entity_pb, entity_pb2.Entity()) def test_key_only(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import 
Key @@ -241,7 +241,7 @@ def test_key_only(self): self._compareEntityProto(entity_pb, expected_pb) def test_simple_fields(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -261,7 +261,7 @@ def test_simple_fields(self): self._compareEntityProto(entity_pb, expected_pb) def test_with_empty_list(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -271,7 +271,7 @@ def test_with_empty_list(self): self._compareEntityProto(entity_pb, entity_pb2.Entity()) def test_inverts_to_protobuf(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb from google.cloud.datastore.helpers import entity_from_protobuf @@ -324,7 +324,7 @@ def test_inverts_to_protobuf(self): self._compareEntityProto(original_pb, new_pb) def test_meaning_with_change(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -342,7 +342,7 @@ def test_meaning_with_change(self): self._compareEntityProto(entity_pb, expected_pb) def test_variable_meanings(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -376,7 +376,7 @@ def _call_fut(self, val): return key_from_protobuf(val) def _makePB(self, project=None, namespace=None, path=()): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Key() if project is not None: pb.partition_id.project_id = project @@ -546,7 +546,7 @@ def _call_fut(self, pb): return _get_value_from_value_pb(pb) def _makePB(self, attr_name, value): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value() setattr(pb, attr_name, value) @@ -556,7 +556,7 @@ def test_datetime(self): import calendar import datetime from google.cloud._helpers import UTC - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) @@ -566,7 +566,7 @@ def test_datetime(self): self.assertEqual(self._call_fut(pb), utc) def test_key(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.key import Key pb = entity_pb2.Value() @@ -596,7 +596,7 @@ def test_unicode(self): self.assertEqual(self._call_fut(pb), u'str') def test_entity(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -612,7 +612,7 @@ def test_entity(self): self.assertEqual(entity['foo'], 'Foo') def test_array(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value() 
array_pb = pb.array_value.values @@ -625,7 +625,7 @@ def test_array(self): def test_geo_point(self): from google.type import latlng_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import GeoPoint lat = -3.14 @@ -639,14 +639,14 @@ def test_geo_point(self): def test_null(self): from google.protobuf import struct_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) result = self._call_fut(pb) self.assertIsNone(result) def test_unknown(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value() with self.assertRaises(ValueError): @@ -661,7 +661,7 @@ def _call_fut(self, value_pb, val): return _set_protobuf_value(value_pb, val) def _makePB(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 return entity_pb2.Value() def test_datetime(self): @@ -799,14 +799,14 @@ def _call_fut(self, *args, **kwargs): return _get_meaning(*args, **kwargs) def test_no_meaning(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() result = self._call_fut(value_pb) self.assertIsNone(result) def test_single(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 @@ -815,7 +815,7 @@ def test_single(self): self.assertEqual(meaning, result) def test_empty_array_value(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() value_pb.array_value.values.add() @@ -825,7 +825,7 @@ def test_empty_array_value(self): self.assertEqual(None, result) def test_array_value(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() meaning = 9 @@ -840,7 +840,7 @@ def test_array_value(self): self.assertEqual(meaning, result) def test_array_value_multiple_meanings(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() meaning1 = 9 @@ -857,7 +857,7 @@ def test_array_value_multiple_meanings(self): self.assertEqual(result, [meaning1, meaning2]) def test_array_value_meaning_partially_unset(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() meaning1 = 9 diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py index b2227d297b31..ed2eb45b4cca 100644 --- a/packages/google-cloud-datastore/unit_tests/test_key.py +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -314,7 +314,7 @@ def test_completed_key_on_complete(self): self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 _KIND = 'KIND' key = self._make_one(_KIND, project=self._DEFAULT_PROJECT) diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py 
b/packages/google-cloud-datastore/unit_tests/test_query.py index 0e431623e369..255bfa8f014f 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -389,7 +389,7 @@ def test_constructor_explicit(self): self.assertTrue(iterator._more_results) def test__build_protobuf_empty(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query client = _Client(None, None) @@ -401,7 +401,7 @@ def test__build_protobuf_empty(self): self.assertEqual(pb, expected_pb) def test__build_protobuf_all_values(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query client = _Client(None, None) @@ -429,7 +429,7 @@ def test__build_protobuf_all_values(self): self.assertEqual(pb, expected_pb) def test__process_query_results(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -450,7 +450,7 @@ def test__process_query_results(self): self.assertTrue(iterator._more_results) def test__process_query_results_done(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -478,7 +478,7 @@ def test__process_query_results_bad_enum(self): def test__next_page(self): from google.cloud.iterator import Page - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query connection = _Connection() @@ -546,7 +546,7 @@ def _call_fut(self, query): return _pb_from_query(query) def test_empty(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 pb = self._call_fut(_Query()) self.assertEqual(list(pb.projection), []) @@ -574,7 +574,7 @@ def test_kind(self): def test_ancestor(self): from google.cloud.datastore.key import Key - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 ancestor = Key('Ancestor', 123, project='PROJECT') pb = self._call_fut(_Query(ancestor=ancestor)) @@ -587,7 +587,7 @@ def test_ancestor(self): self.assertEqual(pfilter.value.key_value, ancestor_pb) def test_filter(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 query = _Query(filters=[('name', '=', u'John')]) query.OPERATORS = { @@ -603,7 +603,7 @@ def test_filter(self): def test_filter_key(self): from google.cloud.datastore.key import Key - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 key = Key('Kind', 123, project='PROJECT') query = _Query(filters=[('__key__', '=', key)]) @@ -620,7 +620,7 @@ def test_filter_key(self): self.assertEqual(pfilter.value.key_value, key_pb) def test_order(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 pb = self._call_fut(_Query(order=['a', '-b', 'c'])) self.assertEqual([item.property.name for item in pb.order], diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index 
c09304df6f5b..6b6b005a6fa3 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -26,7 +26,7 @@ def _make_one(self, client, **kw): return self._get_target_class()(client, **kw) def test_ctor_defaults(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 _PROJECT = 'PROJECT' connection = _Connection() @@ -178,7 +178,7 @@ class Foo(Exception): def _make_key(kind, id_, project): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project From 4ba175c7e1741873fbed4324e16ceda384d8c3db Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 30 Dec 2016 13:44:12 -0800 Subject: [PATCH 067/611] Cutting release 0.22.1 of datastore. --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 8dee2f7bfc62..951551a9fdb8 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -57,7 +57,7 @@ setup( name='google-cloud-datastore', - version='0.22.0', + version='0.22.1', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From d3dcab57beb7aa23935154c7b26f718ad1c9c3f8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 4 Jan 2017 09:38:34 -0800 Subject: [PATCH 068/611] Removing __all__ from datastore helpers. This way **all** public functions will be documented. --- .../google-cloud-datastore/google/cloud/datastore/helpers.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index ced1b83f20e7..1fe5dd2864cb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -30,8 +30,6 @@ from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key -__all__ = ('entity_from_protobuf', 'key_from_protobuf') - def _get_meaning(value_pb, is_list=False): """Get the meaning from a protobuf value. From 076988f51687f442e95791d5e2f10de0d87c2498 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 18 Jan 2017 13:14:03 -0500 Subject: [PATCH 069/611] Update import spacing part 2. 
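Two mechanical cleanups in this pass: `import mock` moves to module scope in test__http.py, and a blank line now separates each function-local import from the first statement that uses it. A minimal before/after sketch of the spacing convention (the helper shown is one of the test methods touched below):

    # Before: the in-method import and its use are flush together.
    def _get_target_class():
        from google.cloud.datastore.query import Query
        return Query

    # After: a blank line follows the in-method import.
    def _get_target_class():
        from google.cloud.datastore.query import Query

        return Query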
--- .../unit_tests/test__http.py | 21 +++++++++---------- .../unit_tests/test_batch.py | 2 ++ .../unit_tests/test_client.py | 7 +++++++ .../unit_tests/test_entity.py | 8 +++++++ .../unit_tests/test_helpers.py | 10 +++++++++ .../unit_tests/test_key.py | 1 + .../unit_tests/test_query.py | 10 +++++++++ .../unit_tests/test_transaction.py | 3 +++ 8 files changed, 51 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 6515767bd0ca..d779aacedf92 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -14,6 +14,8 @@ import unittest +import mock + from google.cloud.datastore._http import _HAVE_GRPC @@ -22,6 +24,7 @@ class Test_DatastoreAPIOverHttp(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore._http import _DatastoreAPIOverHttp + return _DatastoreAPIOverHttp def _make_one(self, *args, **kw): @@ -112,6 +115,7 @@ class Test__grpc_catch_rendezvous(unittest.TestCase): def _call_fut(self): from google.cloud.datastore._http import _grpc_catch_rendezvous + return _grpc_catch_rendezvous() @staticmethod @@ -178,11 +182,10 @@ class Test_DatastoreAPIOverGRPC(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore._http import _DatastoreAPIOverGRPC + return _DatastoreAPIOverGRPC def _make_one(self, stub, connection=None, secure=True, mock_args=None): - import mock - if connection is None: connection = _Connection(None) connection.credentials = object() @@ -364,6 +367,7 @@ def _get_target_class(): def _make_key_pb(self, project, id_=1234): from google.cloud.datastore.key import Key + path_args = ('Kind',) if id_ is not None: path_args += (id_,) @@ -371,13 +375,12 @@ def _make_key_pb(self, project, id_=1234): def _make_query_pb(self, kind): from google.cloud.grpc.datastore.v1 import query_pb2 + pb = query_pb2.Query() pb.kind.add().name = kind return pb def _make_one(self, credentials=None, http=None, use_grpc=False): - import mock - with mock.patch('google.cloud.datastore._http._USE_GRPC', new=use_grpc): return self._get_target_class()(credentials=credentials, http=http) @@ -396,7 +399,6 @@ def test_default_url(self): self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): - import mock from google.cloud._http import API_BASE_URL from google.cloud.environment_vars import GCD_HOST @@ -414,8 +416,6 @@ def test_ctor_defaults(self): self.assertIsNone(conn.credentials) def test_ctor_without_grpc(self): - import mock - connections = [] return_val = object() @@ -434,8 +434,6 @@ def mock_api(connection): self.assertEqual(connections, [conn]) def test_ctor_with_grpc(self): - import mock - api_args = [] return_val = object() @@ -917,7 +915,6 @@ def test_begin_transaction(self): request.ParseFromString(cw['body']) def test_commit_wo_transaction(self): - import mock from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb @@ -965,7 +962,6 @@ def mock_parse(response): self.assertEqual(_parsed, [rsp_pb]) def test_commit_w_transaction(self): - import mock from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb @@ -1014,6 +1010,7 @@ def mock_parse(response): def test_rollback_ok(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 + PROJECT = 'PROJECT' TRANSACTION = b'xact' @@ -1094,6 +1091,7 @@ class 
Test__parse_commit_response(unittest.TestCase): def _call_fut(self, commit_response_pb): from google.cloud.datastore._http import _parse_commit_response + return _parse_commit_response(commit_response_pb) def test_it(self): @@ -1135,6 +1133,7 @@ class Http(object): def __init__(self, headers, content): from httplib2 import Response + self._response = Response(headers) self._content = content diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index 7681a8fd9201..6f2cda6bfbc3 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -28,6 +28,7 @@ def _make_one(self, client): def test_ctor(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 + _PROJECT = 'PROJECT' _NAMESPACE = 'NAMESPACE' connection = _Connection() @@ -417,6 +418,7 @@ def is_partial(self): def to_protobuf(self): from google.cloud.grpc.datastore.v1 import entity_pb2 + key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out # key.partition_id.project_id = self.project diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 67a0229870c0..8481b8a759e3 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -42,6 +43,7 @@ class Test__get_gcd_project(unittest.TestCase): def _call_fut(self): from google.cloud.datastore.client import _get_gcd_project + return _get_gcd_project() def test_no_value(self): @@ -65,6 +67,7 @@ class Test__determine_default_project(unittest.TestCase): def _call_fut(self, project=None): from google.cloud.datastore.client import ( _determine_default_project) + return _determine_default_project(project=project) def _determine_default_helper(self, gcd=None, fallback=None, @@ -131,6 +134,7 @@ def tearDown(self): @staticmethod def _get_target_class(): from google.cloud.datastore.client import Client + return Client def _make_one(self, project=PROJECT, namespace=None, @@ -952,6 +956,7 @@ class _NoCommitBatch(object): def __init__(self, client): from google.cloud.datastore.batch import Batch + self._client = client self._batch = Batch(client) self._batch.begin() @@ -969,6 +974,7 @@ class _NoCommitTransaction(object): def __init__(self, client, transaction_id='TRANSACTION'): from google.cloud.datastore.batch import Batch from google.cloud.datastore.transaction import Transaction + self._client = client xact = self._transaction = Transaction(client) xact._id = transaction_id @@ -1005,6 +1011,7 @@ def is_partial(self): def to_protobuf(self): from google.cloud.grpc.datastore.v1 import entity_pb2 + key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out # key.partition_id.project_id = self.project diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/unit_tests/test_entity.py index 31c60a172001..4a04ac259577 100644 --- a/packages/google-cloud-datastore/unit_tests/test_entity.py +++ b/packages/google-cloud-datastore/unit_tests/test_entity.py @@ -24,6 +24,7 @@ class TestEntity(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.entity import Entity + return Entity def _make_one(self, key=None, 
exclude_from_indexes=()): @@ -53,6 +54,7 @@ def test_ctor_bad_exclude_from_indexes(self): def test___eq_____ne___w_non_entity(self): from google.cloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) entity = self._make_one(key=key) self.assertFalse(entity == object()) @@ -60,6 +62,7 @@ def test___eq_____ne___w_non_entity(self): def test___eq_____ne___w_different_keys(self): from google.cloud.datastore.key import Key + _ID1 = 1234 _ID2 = 2345 key1 = Key(_KIND, _ID1, project=_PROJECT) @@ -91,6 +94,7 @@ def test___eq_____ne___w_same_keys(self): def test___eq_____ne___w_same_keys_different_props(self): from google.cloud.datastore.key import Key + key1 = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key1) entity1['foo'] = 'Foo' @@ -102,6 +106,7 @@ def test___eq_____ne___w_same_keys_different_props(self): def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): from google.cloud.datastore.key import Key + key1 = Key(_KIND, _ID, project=_PROJECT) key2 = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key1) @@ -113,6 +118,7 @@ def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): from google.cloud.datastore.key import Key + _ID1 = 1234 _ID2 = 2345 key1 = Key(_KIND, _ID1, project=_PROJECT) @@ -126,6 +132,7 @@ def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): from google.cloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key) sub1 = self._make_one() @@ -140,6 +147,7 @@ def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self): from google.cloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key) sub1 = self._make_one() diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py index f3d144e6a591..2ea54d5afe52 100644 --- a/packages/google-cloud-datastore/unit_tests/test_helpers.py +++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py @@ -19,6 +19,7 @@ class Test__new_value_pb(unittest.TestCase): def _call_fut(self, entity_pb, name): from google.cloud.datastore.helpers import _new_value_pb + return _new_value_pb(entity_pb, name) def test_it(self): @@ -37,6 +38,7 @@ class Test__property_tuples(unittest.TestCase): def _call_fut(self, entity_pb): from google.cloud.datastore.helpers import _property_tuples + return _property_tuples(entity_pb) def test_it(self): @@ -60,6 +62,7 @@ class Test_entity_from_protobuf(unittest.TestCase): def _call_fut(self, val): from google.cloud.datastore.helpers import entity_from_protobuf + return entity_from_protobuf(val) def test_it(self): @@ -193,6 +196,7 @@ class Test_entity_to_protobuf(unittest.TestCase): def _call_fut(self, entity): from google.cloud.datastore.helpers import entity_to_protobuf + return entity_to_protobuf(entity) def _compareEntityProto(self, entity_pb1, entity_pb2): @@ -377,6 +381,7 @@ def _call_fut(self, val): def _makePB(self, project=None, namespace=None, path=()): from google.cloud.grpc.datastore.v1 import entity_pb2 + pb = entity_pb2.Key() if project is not None: pb.partition_id.project_id = project @@ -485,6 +490,7 @@ def test_long(self): def test_native_str(self): import six + name, value = self._call_fut('str') if six.PY2: 
self.assertEqual(name, 'blob_value') @@ -504,6 +510,7 @@ def test_unicode(self): def test_entity(self): from google.cloud.datastore.entity import Entity + entity = Entity() name, value = self._call_fut(entity) self.assertEqual(name, 'entity_value') @@ -718,6 +725,7 @@ def test_long(self): def test_native_str(self): import six + pb = self._makePB() self._call_fut(pb, 'str') if six.PY2: @@ -796,6 +804,7 @@ class Test__get_meaning(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud.datastore.helpers import _get_meaning + return _get_meaning(*args, **kwargs) def test_no_meaning(self): @@ -877,6 +886,7 @@ class TestGeoPoint(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.helpers import GeoPoint + return GeoPoint def _make_one(self, *args, **kwargs): diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py index ed2eb45b4cca..c699ec773885 100644 --- a/packages/google-cloud-datastore/unit_tests/test_key.py +++ b/packages/google-cloud-datastore/unit_tests/test_key.py @@ -22,6 +22,7 @@ class TestKey(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.key import Key + return Key def _make_one(self, *args, **kwargs): diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index 255bfa8f014f..e0a5a955beca 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -22,6 +22,7 @@ class TestQuery(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.query import Query + return Query def _make_one(self, *args, **kw): @@ -47,6 +48,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): from google.cloud.datastore.key import Key + _PROJECT = 'OTHER_PROJECT' _KIND = 'KIND' _NAMESPACE = 'OTHER_NAMESPACE' @@ -145,6 +147,7 @@ def _assign(val): def test_ancestor_setter_w_key(self): from google.cloud.datastore.key import Key + _NAME = u'NAME' key = Key('KIND', 123, project=self._PROJECT) query = self._make_one(self._makeClient()) @@ -154,6 +157,7 @@ def test_ancestor_setter_w_key(self): def test_ancestor_deleter_w_key(self): from google.cloud.datastore.key import Key + key = Key('KIND', 123, project=self._PROJECT) query = self._make_one(client=self._makeClient(), ancestor=key) del query.ancestor @@ -185,6 +189,7 @@ def test_add_filter_w_all_operators(self): def test_add_filter_w_known_operator_and_entity(self): from google.cloud.datastore.entity import Entity + query = self._make_one(self._makeClient()) other = Entity() other['firstname'] = u'John' @@ -200,6 +205,7 @@ def test_add_filter_w_whitespace_property_name(self): def test_add_filter___key__valid_key(self): from google.cloud.datastore.key import Key + query = self._make_one(self._makeClient()) key = Key('Foo', project=self._PROJECT) query.add_filter('__key__', '=', key) @@ -207,6 +213,7 @@ def test_add_filter___key__valid_key(self): def test_filter___key__not_equal_operator(self): from google.cloud.datastore.key import Key + key = Key('Foo', project=self._PROJECT) query = self._make_one(self._makeClient()) query.add_filter('__key__', '<', key) @@ -343,6 +350,7 @@ class TestIterator(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.query import Iterator + return Iterator def _make_one(self, *args, **kw): @@ -519,6 +527,7 @@ class Test__item_to_entity(unittest.TestCase): 
def _call_fut(self, iterator, entity_pb): from google.cloud.datastore.query import _item_to_entity + return _item_to_entity(iterator, entity_pb) def test_it(self): @@ -543,6 +552,7 @@ class Test__pb_from_query(unittest.TestCase): def _call_fut(self, query): from google.cloud.datastore.query import _pb_from_query + return _pb_from_query(query) def test_empty(self): diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index 6b6b005a6fa3..7aa295bf7fca 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -20,6 +20,7 @@ class TestTransaction(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.transaction import Transaction + return Transaction def _make_one(self, client, **kw): @@ -220,6 +221,7 @@ class _Entity(dict): def __init__(self): super(_Entity, self).__init__() from google.cloud.datastore.key import Key + self.key = Key('KIND', project='PROJECT') @@ -246,6 +248,7 @@ class _NoCommitBatch(object): def __init__(self, client): from google.cloud.datastore.batch import Batch + self._client = client self._batch = Batch(client) From 15b554b99ad63a09b267ec0a0aa41874ae19cb10 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 15:53:28 -0800 Subject: [PATCH 070/611] Changing datastore Connection to only accept client. --- .../google/cloud/datastore/_http.py | 14 +- .../google/cloud/datastore/client.py | 16 +- .../unit_tests/test__http.py | 149 +++++++++--------- .../unit_tests/test_client.py | 11 +- 4 files changed, 89 insertions(+), 101 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index a6bae476dff8..4c01f60c84d2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -397,11 +397,8 @@ class Connection(connection_module.Connection): in method arguments, however it should be capable of returning advanced types. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` - :param credentials: The OAuth2 Credentials to use for this connection. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. + :type client: :class:`~google.cloud.datastore.client.Client` + :param client: The client that owns the current connection. 
""" API_BASE_URL = 'https://' + DATASTORE_API_HOST @@ -414,11 +411,8 @@ class Connection(connection_module.Connection): '/{project}:{method}') """A template for the URL of a particular API call.""" - SCOPE = ('https://www.googleapis.com/auth/datastore',) - """The scopes required for authenticating as a Cloud Datastore consumer.""" - - def __init__(self, credentials=None, http=None): - super(Connection, self).__init__(credentials=credentials, http=http) + def __init__(self, client): + super(Connection, self).__init__(client) try: self.host = os.environ[GCD_HOST] self.api_base_url = 'http://' + self.host diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 42b0c6497f88..b76a8cce7fc1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -18,8 +18,7 @@ from google.cloud._helpers import _LocalStack from google.cloud._helpers import ( _determine_default_project as _base_default_project) -from google.cloud.client import _ClientProjectMixin -from google.cloud.client import Client as _BaseClient +from google.cloud.client import ClientWithProject from google.cloud.datastore._http import Connection from google.cloud.datastore import helpers from google.cloud.datastore.batch import Batch @@ -143,7 +142,7 @@ def _extended_lookup(connection, project, key_pbs, return results -class Client(_BaseClient, _ClientProjectMixin): +class Client(ClientWithProject): """Convenience wrapper for invoking APIs/factories w/ a project. .. doctest:: @@ -171,13 +170,14 @@ class Client(_BaseClient, _ClientProjectMixin): ``credentials`` for the current object. """ + SCOPE = ('https://www.googleapis.com/auth/datastore',) + """The scopes required for authenticating as a Cloud Datastore consumer.""" + def __init__(self, project=None, namespace=None, credentials=None, http=None): - _ClientProjectMixin.__init__(self, project=project) - _BaseClient.__init__(self, credentials=credentials, http=http) - self._connection = Connection( - credentials=self._credentials, http=self._http) - + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection(self) self.namespace = namespace self._batch_stack = _LocalStack() diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index d779aacedf92..ced9b65a4cd0 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -380,10 +380,10 @@ def _make_query_pb(self, kind): pb.kind.add().name = kind return pb - def _make_one(self, credentials=None, http=None, use_grpc=False): + def _make_one(self, client, use_grpc=False): with mock.patch('google.cloud.datastore._http._USE_GRPC', new=use_grpc): - return self._get_target_class()(credentials=credentials, http=http) + return self._get_target_class()(client) def _verifyProtobufCall(self, called_with, URI, conn): self.assertEqual(called_with['uri'], URI) @@ -395,7 +395,7 @@ def _verifyProtobufCall(self, called_with, URI, conn): def test_default_url(self): klass = self._get_target_class() - conn = self._make_one() + conn = self._make_one(object()) self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): @@ -406,17 +406,19 @@ def test_custom_url_from_env(self): fake_environ = {GCD_HOST: HOST} with mock.patch('os.environ', 
new=fake_environ): - conn = self._make_one() + conn = self._make_one(object()) self.assertNotEqual(conn.api_base_url, API_BASE_URL) self.assertEqual(conn.api_base_url, 'http://' + HOST) - def test_ctor_defaults(self): - conn = self._make_one() - self.assertIsNone(conn.credentials) + def test_constructor(self): + client = object() + conn = self._make_one(client) + self.assertIs(conn._client, client) - def test_ctor_without_grpc(self): + def test_constructor_without_grpc(self): connections = [] + client = object() return_val = object() def mock_api(connection): @@ -427,14 +429,15 @@ def mock_api(connection): 'google.cloud.datastore._http._DatastoreAPIOverHttp', new=mock_api) with patch: - conn = self._make_one(use_grpc=False) + conn = self._make_one(client, use_grpc=False) - self.assertIsNone(conn.credentials) + self.assertIs(conn._client, client) self.assertIs(conn._datastore_api, return_val) self.assertEqual(connections, [conn]) - def test_ctor_with_grpc(self): + def test_constructor_with_grpc(self): api_args = [] + client = object() return_val = object() def mock_api(connection, secure): @@ -445,43 +448,16 @@ def mock_api(connection, secure): 'google.cloud.datastore._http._DatastoreAPIOverGRPC', new=mock_api) with patch: - conn = self._make_one(use_grpc=True) + conn = self._make_one(client, use_grpc=True) - self.assertIsNone(conn.credentials) + self.assertIs(conn._client, client) self.assertIs(conn._datastore_api, return_val) self.assertEqual(api_args, [(conn, True)]) - def test_ctor_explicit(self): - class Creds(object): - pass - - creds = Creds() - conn = self._make_one(creds) - self.assertIs(conn.credentials, creds) - - def test_http_w_existing(self): - conn = self._make_one() - conn._http = http = object() - self.assertIs(conn.http, http) - - def test_http_wo_creds(self): - import httplib2 - - conn = self._make_one() - self.assertIsInstance(conn.http, httplib2.Http) - - def test_http_w_creds(self): - class Creds(object): - pass - - creds = Creds() - conn = self._make_one(creds) - self.assertIs(conn.http.credentials, creds) - def test_build_api_url_w_default_base_version(self): PROJECT = 'PROJECT' METHOD = 'METHOD' - conn = self._make_one() + conn = self._make_one(object()) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, @@ -495,7 +471,7 @@ def test_build_api_url_w_explicit_base_version(self): VER = '3.1415926' PROJECT = 'PROJECT' METHOD = 'METHOD' - conn = self._make_one() + conn = self._make_one(object()) URI = '/'.join([ BASE, VER, @@ -511,14 +487,15 @@ def test_lookup_single_key_empty_response(self): PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb]) self.assertEqual(len(found), 0) self.assertEqual(len(missing), 0) @@ -538,14 +515,15 @@ def test_lookup_single_key_empty_response_w_eventual(self): PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http 
= conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb], eventual=True) self.assertEqual(len(found), 0) @@ -567,7 +545,7 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' key_pb = self._make_key_pb(PROJECT) - conn = self._make_one() + conn = self._make_one(object()) self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb, eventual=True, transaction_id=TRANSACTION) @@ -578,14 +556,15 @@ def test_lookup_single_key_empty_response_w_transaction(self): TRANSACTION = b'TRANSACTION' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb], transaction_id=TRANSACTION) self.assertEqual(len(found), 0) @@ -611,14 +590,15 @@ def test_lookup_single_key_nonempty_response(self): entity = entity_pb2.Entity() entity.key.CopyFrom(key_pb) rsp_pb.found.add(entity=entity) - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) (found,), missing, deferred = conn.lookup(PROJECT, [key_pb]) self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) @@ -640,14 +620,15 @@ def test_lookup_multiple_keys_empty_response(self): key_pb1 = self._make_key_pb(PROJECT) key_pb2 = self._make_key_pb(PROJECT, id_=2345) rsp_pb = datastore_pb2.LookupResponse() - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) self.assertEqual(len(found), 0) self.assertEqual(len(missing), 0) @@ -673,14 +654,15 @@ def test_lookup_multiple_keys_w_missing(self): er_1.entity.key.CopyFrom(key_pb1) er_2 = rsp_pb.missing.add() er_2.entity.key.CopyFrom(key_pb2) - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) self.assertEqual(result, []) self.assertEqual(len(deferred), 0) @@ -705,14 +687,15 @@ def test_lookup_multiple_keys_w_deferred(self): rsp_pb = datastore_pb2.LookupResponse() rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) - http = conn._http = 
Http({'status': '200'}, rsp_pb.SerializeToString()) result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) self.assertEqual(result, []) self.assertEqual(len(missing), 0) @@ -745,14 +728,15 @@ def test_run_query_w_eventual_no_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query(PROJECT, q_pb, eventual=True) self.assertEqual(pbs, []) @@ -784,14 +768,15 @@ def test_run_query_wo_eventual_w_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query( PROJECT, q_pb, transaction_id=TRANSACTION) self.assertEqual(pbs, []) @@ -824,7 +809,7 @@ def test_run_query_w_eventual_and_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._make_one() + conn = self._make_one(object()) self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb, eventual=True, transaction_id=TRANSACTION) @@ -841,14 +826,15 @@ def test_run_query_wo_namespace_empty_result(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query(PROJECT, q_pb) self.assertEqual(pbs, []) self.assertEqual(end, CURSOR) @@ -874,14 +860,15 @@ def test_run_query_w_namespace_nonempty_result(self): rsp_pb.batch.entity_results.add(entity=entity) rsp_pb.batch.entity_result_type = 1 # FULL rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs = conn.run_query(PROJECT, q_pb, 'NS')[0] self.assertEqual(len(pbs), 1) cw = http._called_with @@ -899,14 +886,15 @@ def test_begin_transaction(self): TRANSACTION = b'TRANSACTION' rsp_pb = datastore_pb2.BeginTransactionResponse() rsp_pb.transaction = TRANSACTION - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI 
= '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':beginTransaction', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) @@ -927,14 +915,15 @@ def test_commit_wo_transaction(self): insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':commit', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) # Set up mock for parsing the response. expected_result = object() @@ -974,14 +963,15 @@ def test_commit_w_transaction(self): insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':commit', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) # Set up mock for parsing the response. expected_result = object() @@ -1015,14 +1005,15 @@ def test_rollback_ok(self): TRANSACTION = b'xact' rsp_pb = datastore_pb2.RollbackResponse() - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':rollback', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertIsNone(conn.rollback(PROJECT, TRANSACTION)) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) @@ -1036,14 +1027,15 @@ def test_allocate_ids_empty(self): PROJECT = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':allocateIds', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.allocate_ids(PROJECT, []), []) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) @@ -1067,14 +1059,15 @@ def test_allocate_ids_non_empty(self): rsp_pb = datastore_pb2.AllocateIdsResponse() rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) - conn = self._make_one() + http = Http({'status': '200'}, rsp_pb.SerializeToString()) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':allocateIds', ]) - http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs), after_key_pbs) cw = http._called_with diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 8481b8a759e3..32236ecf2c49 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -168,16 +168,17 @@ def fallback_mock(project): 
new=fallback_mock) patch2 = mock.patch( 'google.cloud.client.get_credentials', - new=lambda: creds) + return_value=creds) with patch1: with patch2: client = klass() + self.assertEqual(client.project, OTHER) self.assertIsNone(client.namespace) self.assertIsInstance(client._connection, _MockConnection) - self.assertIs(client._connection.credentials, creds) - self.assertIsNone(client._connection.http) + self.assertIs(client._credentials, creds) + self.assertIsNone(client._http_internal) self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) self.assertEqual(default_called, [None]) @@ -194,8 +195,8 @@ def test_ctor_w_explicit_inputs(self): self.assertEqual(client.project, OTHER) self.assertEqual(client.namespace, NAMESPACE) self.assertIsInstance(client._connection, _MockConnection) - self.assertIs(client._connection.credentials, creds) - self.assertIs(client._connection.http, http) + self.assertIs(client._credentials, creds) + self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) self.assertEqual(list(client._batch_stack), []) From cca968ddbfe2661fc74dfe750891c0532126d992 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Tue, 31 Jan 2017 09:17:12 -0500 Subject: [PATCH 071/611] Updates for pycodestyle. (#2973) --- .../google/cloud/datastore/transaction.py | 2 +- packages/google-cloud-datastore/unit_tests/test__http.py | 4 ++-- packages/google-cloud-datastore/unit_tests/test_batch.py | 2 +- packages/google-cloud-datastore/unit_tests/test_client.py | 2 +- packages/google-cloud-datastore/unit_tests/test_query.py | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index b63098959d0d..48a044ba789c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -186,7 +186,7 @@ def begin(self): try: self._id = self._client._connection.begin_transaction( self.project) - except: + except: # noqa: E722 do not use bare except, specify exception instead self._status = self._ABORTED raise diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index ced9b65a4cd0..7e47e7c2f65a 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -1051,11 +1051,11 @@ def test_allocate_ids_non_empty(self): before_key_pbs = [ self._make_key_pb(PROJECT, id_=None), self._make_key_pb(PROJECT, id_=None), - ] + ] after_key_pbs = [ self._make_key_pb(PROJECT), self._make_key_pb(PROJECT, id_=2345), - ] + ] rsp_pb = datastore_pb2.AllocateIdsResponse() rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py index 6f2cda6bfbc3..72614d070cba 100644 --- a/packages/google-cloud-datastore/unit_tests/test_batch.py +++ b/packages/google-cloud-datastore/unit_tests/test_batch.py @@ -121,7 +121,7 @@ def test_put_entity_w_completed_key(self): 'baz': 'qux', 'spam': [1, 2, 3], 'frotz': [], # will be ignored - } + } connection = _Connection() client = _Client(_PROJECT, connection) batch = self._make_one(client) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py 
b/packages/google-cloud-datastore/unit_tests/test_client.py index 32236ecf2c49..e89acc9c0922 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -853,7 +853,7 @@ def test_query_explicit(self): projection=PROJECTION, order=ORDER, distinct_on=DISTINCT_ON, - ) + ) self.assertIsInstance(query, _Dummy) self.assertEqual(query.args, (client,)) diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index e0a5a955beca..f3ed774db336 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -68,7 +68,7 @@ def test_ctor_explicit(self): projection=PROJECTION, order=ORDER, distinct_on=DISTINCT_ON, - ) + ) self.assertIs(query._client, client) self.assertEqual(query.project, _PROJECT) self.assertEqual(query.kind, _KIND) From e6c96317d3152808762e8227e3c0b33c2e0ca536 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 14 Feb 2017 12:25:25 -0500 Subject: [PATCH 072/611] Prep spanner release. --- .../google-cloud-datastore/google/cloud/datastore/client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index b76a8cce7fc1..87ab8f6ee0c6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -253,7 +253,8 @@ def get(self, key, missing=None, deferred=None, transaction=None): :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. - :type transaction: :class:`~.transaction.Transaction` + :type transaction: + :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. @@ -281,7 +282,8 @@ def get_multi(self, keys, missing=None, deferred=None, transaction=None): by the backend as "deferred" will be copied into it. If the list is not empty, an error will occur. - :type transaction: :class:`~.transaction.Transaction` + :type transaction: + :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. From 3ae8f4b9681b9569af90667fe32b21326d1cdcb1 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 16 Feb 2017 14:19:53 -0500 Subject: [PATCH 073/611] Update core dependency to google-cloud-core >= 0.23.0, < 0.24dev. (#3028) * Update core dependency to google-cloud-core >= 0.23.0, < 0.24dev. --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 951551a9fdb8..5d448448efe3 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.22.1, < 0.23dev', + 'google-cloud-core >= 0.23.0, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-datastore-v1 >= 0.14.0, < 0.15dev', ] From 9624ca99d55c264387529e7ed7fb19520d714f76 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 16 Feb 2017 16:41:57 -0800 Subject: [PATCH 074/611] Adding ability to send version info header on HTTP requests. 
Added an "extra headers" feature to enable this. I am not a fan of changing `Connection()` so haphazardly, but I hope to completely refactor / destroy `Connection()` in the near term, so I am less worried. This only adds the storage and datastore header info, for the purposes of a simple review. Once we agree on the approach, I can add support in the other subpackages. --- .../google/cloud/datastore/_http.py | 5 ++ .../unit_tests/test__http.py | 57 ++++++++++++++----- 2 files changed, 47 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 4c01f60c84d2..ee9e004b1b0d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -16,6 +16,7 @@ import contextlib import os +from pkg_resources import get_distribution from google.rpc import status_pb2 @@ -61,6 +62,9 @@ _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC +_DATASTORE_DIST = get_distribution('google-cloud-datastore') +_CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format( + _DATASTORE_DIST.version) class _DatastoreAPIOverHttp(object): @@ -102,6 +106,7 @@ def _request(self, project, method, data): 'Content-Type': 'application/x-protobuf', 'Content-Length': str(len(data)), 'User-Agent': self.connection.USER_AGENT, + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } headers, content = self.connection.http.request( uri=self.connection.build_api_url(project=project, method=method), diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 7e47e7c2f65a..d50f297317cb 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -31,6 +31,9 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test__rpc(self): + from google.cloud import _http as connection_module + from google.cloud.datastore._http import _CLIENT_INFO + class ReqPB(object): def SerializeToString(self): @@ -56,17 +59,24 @@ def FromString(cls, pb): self.assertIsInstance(response, RspPB) self.assertEqual(response._pb, 'CONTENT') called_with = http._called_with + self.assertEqual(len(called_with), 4) self.assertEqual(called_with['uri'], URI) self.assertEqual(called_with['method'], 'POST') - self.assertEqual(called_with['headers']['Content-Type'], - 'application/x-protobuf') - self.assertEqual(called_with['headers']['User-Agent'], - conn.USER_AGENT) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'User-Agent': conn.USER_AGENT, + 'Content-Length': '5', + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + self.assertEqual(called_with['headers'], expected_headers) self.assertEqual(called_with['body'], REQPB) self.assertEqual(conn.build_kwargs, [{'method': METHOD, 'project': PROJECT}]) def test__request_w_200(self): + from google.cloud import _http as connection_module + from google.cloud.datastore._http import _CLIENT_INFO + PROJECT = 'PROJECT' METHOD = 'METHOD' DATA = b'DATA' @@ -77,12 +87,16 @@ def test__request_w_200(self): self.assertEqual(datastore_api._request(PROJECT, METHOD, DATA), 'CONTENT') called_with = http._called_with + self.assertEqual(len(called_with), 4) self.assertEqual(called_with['uri'], URI) self.assertEqual(called_with['method'], 'POST') -
self.assertEqual(called_with['headers']['Content-Type'], - 'application/x-protobuf') - self.assertEqual(called_with['headers']['User-Agent'], - conn.USER_AGENT) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'User-Agent': conn.USER_AGENT, + 'Content-Length': '4', + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + self.assertEqual(called_with['headers'], expected_headers) self.assertEqual(called_with['body'], DATA) self.assertEqual(conn.build_kwargs, [{'method': METHOD, 'project': PROJECT}]) @@ -386,12 +400,18 @@ def _make_one(self, client, use_grpc=False): return self._get_target_class()(client) def _verifyProtobufCall(self, called_with, URI, conn): + from google.cloud import _http as connection_module + from google.cloud.datastore._http import _CLIENT_INFO + self.assertEqual(called_with['uri'], URI) self.assertEqual(called_with['method'], 'POST') - self.assertEqual(called_with['headers']['Content-Type'], - 'application/x-protobuf') - self.assertEqual(called_with['headers']['User-Agent'], - conn.USER_AGENT) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'User-Agent': conn.USER_AGENT, + 'Content-Length': str(len(called_with['body'])), + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + self.assertEqual(called_with['headers'], expected_headers) def test_default_url(self): klass = self._get_target_class() @@ -681,6 +701,9 @@ def test_lookup_multiple_keys_w_missing(self): def test_lookup_multiple_keys_w_deferred(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud import _http as connection_module + from google.cloud.datastore._http import _CLIENT_INFO + PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) key_pb2 = self._make_key_pb(PROJECT, id_=2345) @@ -704,9 +727,13 @@ def test_lookup_multiple_keys_w_deferred(self): self._verifyProtobufCall(cw, URI, conn) self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers']['Content-Type'], - 'application/x-protobuf') - self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'User-Agent': conn.USER_AGENT, + 'Content-Length': '48', + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) From 5965a54dff4d58357825ed90ebd51ee8b0a43ced Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 16 Feb 2017 19:17:40 -0800 Subject: [PATCH 075/611] Moving datastore GAX/gRPC helpers out of _http and into dedicated module. --- .../google/cloud/datastore/_gax.py | 197 +++++++++++ .../google/cloud/datastore/_http.py | 183 +---------- .../unit_tests/test__gax.py | 310 ++++++++++++++++++ .../unit_tests/test__http.py | 286 ---------------- 4 files changed, 510 insertions(+), 466 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_gax.py create mode 100644 packages/google-cloud-datastore/unit_tests/test__gax.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py new file mode 100644 index 000000000000..cd2739bd10ce --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -0,0 +1,197 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for making API requests via GAX / gRPC.""" + + +import contextlib + +from grpc import StatusCode + +from google.cloud._helpers import make_insecure_stub +from google.cloud._helpers import make_secure_stub +from google.cloud import exceptions + +from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc + + +_GRPC_ERROR_MAPPING = { + StatusCode.UNKNOWN: exceptions.InternalServerError, + StatusCode.INVALID_ARGUMENT: exceptions.BadRequest, + StatusCode.DEADLINE_EXCEEDED: exceptions.GatewayTimeout, + StatusCode.NOT_FOUND: exceptions.NotFound, + StatusCode.ALREADY_EXISTS: exceptions.Conflict, + StatusCode.PERMISSION_DENIED: exceptions.Forbidden, + StatusCode.UNAUTHENTICATED: exceptions.Unauthorized, + StatusCode.RESOURCE_EXHAUSTED: exceptions.TooManyRequests, + StatusCode.FAILED_PRECONDITION: exceptions.PreconditionFailed, + StatusCode.ABORTED: exceptions.Conflict, + StatusCode.OUT_OF_RANGE: exceptions.BadRequest, + StatusCode.UNIMPLEMENTED: exceptions.MethodNotImplemented, + StatusCode.INTERNAL: exceptions.InternalServerError, + StatusCode.UNAVAILABLE: exceptions.ServiceUnavailable, + StatusCode.DATA_LOSS: exceptions.InternalServerError, +} + + +@contextlib.contextmanager +def _grpc_catch_rendezvous(): + """Remap gRPC exceptions that happen in context. + + .. _code.proto: https://github.com/googleapis/googleapis/blob/\ + master/google/rpc/code.proto + + Remaps gRPC exceptions to the classes defined in + :mod:`~google.cloud.exceptions` (according to the description + in `code.proto`_). + """ + try: + yield + except exceptions.GrpcRendezvous as exc: + error_code = exc.code() + error_class = _GRPC_ERROR_MAPPING.get(error_code) + if error_class is None: + raise + else: + raise error_class(exc.details()) + + +class _DatastoreAPIOverGRPC(object): + """Helper mapping datastore API methods. + + Makes requests to send / receive protobuf content over gRPC. + + Methods make bare API requests without any helpers for constructing + the requests or parsing the responses. + + :type connection: :class:`Connection` + :param connection: A connection object that contains helpful + information for making requests. + + :type secure: bool + :param secure: Flag indicating if a secure stub connection is needed. + """ + + def __init__(self, connection, secure): + if secure: + self._stub = make_secure_stub(connection.credentials, + connection.USER_AGENT, + datastore_pb2_grpc.DatastoreStub, + connection.host) + else: + self._stub = make_insecure_stub(datastore_pb2_grpc.DatastoreStub, + connection.host) + + def lookup(self, project, request_pb): + """Perform a ``lookup`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`.datastore_pb2.LookupRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`.datastore_pb2.LookupResponse` + :returns: The returned protobuf response object. 
+ """ + request_pb.project_id = project + with _grpc_catch_rendezvous(): + return self._stub.Lookup(request_pb) + + def run_query(self, project, request_pb): + """Perform a ``runQuery`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`.datastore_pb2.RunQueryRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`.datastore_pb2.RunQueryResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + with _grpc_catch_rendezvous(): + return self._stub.RunQuery(request_pb) + + def begin_transaction(self, project, request_pb): + """Perform a ``beginTransaction`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: + :class:`.datastore_pb2.BeginTransactionRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`.datastore_pb2.BeginTransactionResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + with _grpc_catch_rendezvous(): + return self._stub.BeginTransaction(request_pb) + + def commit(self, project, request_pb): + """Perform a ``commit`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`.datastore_pb2.CommitRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`.datastore_pb2.CommitResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + with _grpc_catch_rendezvous(): + return self._stub.Commit(request_pb) + + def rollback(self, project, request_pb): + """Perform a ``rollback`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`.datastore_pb2.RollbackRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`.datastore_pb2.RollbackResponse` + :returns: The returned protobuf response object. + """ + request_pb.project_id = project + with _grpc_catch_rendezvous(): + return self._stub.Rollback(request_pb) + + def allocate_ids(self, project, request_pb): + """Perform an ``allocateIds`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` + :param request_pb: The request protobuf object. + + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` + :returns: The returned protobuf response object. 
+ """ + request_pb.project_id = project + with _grpc_catch_rendezvous(): + return self._stub.AllocateIds(request_pb) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 4c01f60c84d2..d1defedfe632 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -14,46 +14,21 @@ """Connections to Google Cloud Datastore API servers.""" -import contextlib import os from google.rpc import status_pb2 -from google.cloud._helpers import make_insecure_stub -from google.cloud._helpers import make_secure_stub from google.cloud import _http as connection_module from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_HOST from google.cloud import exceptions from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2 try: - from grpc import StatusCode - from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc + from google.cloud.datastore._gax import _DatastoreAPIOverGRPC + _HAVE_GRPC = True except ImportError: # pragma: NO COVER - _GRPC_ERROR_MAPPING = {} + _DatastoreAPIOverGRPC = None _HAVE_GRPC = False - datastore_pb2_grpc = None - StatusCode = None -else: - # NOTE: We don't include OK -> 200 or CANCELLED -> 499 - _GRPC_ERROR_MAPPING = { - StatusCode.UNKNOWN: exceptions.InternalServerError, - StatusCode.INVALID_ARGUMENT: exceptions.BadRequest, - StatusCode.DEADLINE_EXCEEDED: exceptions.GatewayTimeout, - StatusCode.NOT_FOUND: exceptions.NotFound, - StatusCode.ALREADY_EXISTS: exceptions.Conflict, - StatusCode.PERMISSION_DENIED: exceptions.Forbidden, - StatusCode.UNAUTHENTICATED: exceptions.Unauthorized, - StatusCode.RESOURCE_EXHAUSTED: exceptions.TooManyRequests, - StatusCode.FAILED_PRECONDITION: exceptions.PreconditionFailed, - StatusCode.ABORTED: exceptions.Conflict, - StatusCode.OUT_OF_RANGE: exceptions.BadRequest, - StatusCode.UNIMPLEMENTED: exceptions.MethodNotImplemented, - StatusCode.INTERNAL: exceptions.InternalServerError, - StatusCode.UNAVAILABLE: exceptions.ServiceUnavailable, - StatusCode.DATA_LOSS: exceptions.InternalServerError, - } - _HAVE_GRPC = True DATASTORE_API_HOST = 'datastore.googleapis.com' @@ -238,158 +213,6 @@ def allocate_ids(self, project, request_pb): _datastore_pb2.AllocateIdsResponse) -@contextlib.contextmanager -def _grpc_catch_rendezvous(): - """Re-map gRPC exceptions that happen in context. - - .. _code.proto: https://github.com/googleapis/googleapis/blob/\ - master/google/rpc/code.proto - - Remaps gRPC exceptions to the classes defined in - :mod:`~google.cloud.exceptions` (according to the description - in `code.proto`_). - """ - try: - yield - except exceptions.GrpcRendezvous as exc: - error_code = exc.code() - error_class = _GRPC_ERROR_MAPPING.get(error_code) - if error_class is None: - raise - else: - raise error_class(exc.details()) - - -class _DatastoreAPIOverGRPC(object): - """Helper mapping datastore API methods. - - Makes requests to send / receive protobuf content over gRPC. - - Methods make bare API requests without any helpers for constructing - the requests or parsing the responses. - - :type connection: :class:`Connection` - :param connection: A connection object that contains helpful - information for making requests. - - :type secure: bool - :param secure: Flag indicating if a secure stub connection is needed. 
- """ - - def __init__(self, connection, secure): - if secure: - self._stub = make_secure_stub(connection.credentials, - connection.USER_AGENT, - datastore_pb2_grpc.DatastoreStub, - connection.host) - else: - self._stub = make_insecure_stub(datastore_pb2_grpc.DatastoreStub, - connection.host) - - def lookup(self, project, request_pb): - """Perform a ``lookup`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.LookupRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.LookupResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.Lookup(request_pb) - - def run_query(self, project, request_pb): - """Perform a ``runQuery`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.RunQueryRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.RunQueryResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.RunQuery(request_pb) - - def begin_transaction(self, project, request_pb): - """Perform a ``beginTransaction`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: - :class:`.datastore_pb2.BeginTransactionRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.BeginTransactionResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.BeginTransaction(request_pb) - - def commit(self, project, request_pb): - """Perform a ``commit`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.CommitRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.CommitResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.Commit(request_pb) - - def rollback(self, project, request_pb): - """Perform a ``rollback`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.RollbackRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.RollbackResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.Rollback(request_pb) - - def allocate_ids(self, project, request_pb): - """Perform an ``allocateIds`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.AllocateIdsResponse` - :returns: The returned protobuf response object. 
- """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.AllocateIds(request_pb) - - class Connection(connection_module.Connection): """A connection to the Google Cloud Datastore via the Protobuf API. diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py new file mode 100644 index 000000000000..4f49483207d7 --- /dev/null +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -0,0 +1,310 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock + +from google.cloud.datastore._http import _HAVE_GRPC + + +USER_AGENT = 'you-sir-age-int' + + +@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') +class Test__grpc_catch_rendezvous(unittest.TestCase): + + def _call_fut(self): + from google.cloud.datastore._gax import _grpc_catch_rendezvous + + return _grpc_catch_rendezvous() + + @staticmethod + def _fake_method(exc, result=None): + if exc is None: + return result + else: + raise exc + + def test_success(self): + expected = object() + with self._call_fut(): + result = self._fake_method(None, expected) + self.assertIs(result, expected) + + def test_failure_aborted(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import Conflict + from google.cloud.exceptions import GrpcRendezvous + + details = 'Bad things.' 
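+        # ``_RPCState`` is private to gRPC; building one by hand lets the
+        # test fake a rendezvous without opening a real channel.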
+ exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) + exc = GrpcRendezvous(exc_state, None, None, None) + with self.assertRaises(Conflict): + with self._call_fut(): + self._fake_method(exc) + + def test_failure_invalid_argument(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import BadRequest + from google.cloud.exceptions import GrpcRendezvous + + details = ('Cannot have inequality filters on multiple ' + 'properties: [created, priority]') + exc_state = _RPCState((), None, None, + StatusCode.INVALID_ARGUMENT, details) + exc = GrpcRendezvous(exc_state, None, None, None) + with self.assertRaises(BadRequest): + with self._call_fut(): + self._fake_method(exc) + + def test_failure_cancelled(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import GrpcRendezvous + + exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) + exc = GrpcRendezvous(exc_state, None, None, None) + with self.assertRaises(GrpcRendezvous): + with self._call_fut(): + self._fake_method(exc) + + def test_commit_failure_non_grpc_err(self): + exc = RuntimeError('Not a gRPC error') + with self.assertRaises(RuntimeError): + with self._call_fut(): + self._fake_method(exc) + + +class Test_DatastoreAPIOverGRPC(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.datastore._gax import _DatastoreAPIOverGRPC + + return _DatastoreAPIOverGRPC + + def _make_one(self, stub, connection=None, secure=True, mock_args=None): + if connection is None: + connection = mock.Mock( + credentials=object(), + host='CURR_HOST', + USER_AGENT=USER_AGENT, + spec=['credentials', 'host', 'USER_AGENT'], + ) + + if mock_args is None: + mock_args = [] + + def mock_make_stub(*args): + mock_args.append(args) + return stub + + if secure: + patch = mock.patch( + 'google.cloud.datastore._gax.make_secure_stub', + new=mock_make_stub) + else: + patch = mock.patch( + 'google.cloud.datastore._gax.make_insecure_stub', + new=mock_make_stub) + + with patch: + return self._get_target_class()(connection, secure) + + def test_constructor(self): + from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc + + conn = mock.Mock( + credentials=object(), + host='CURR_HOST', + USER_AGENT=USER_AGENT, + spec=['credentials', 'host', 'USER_AGENT'], + ) + + stub = _GRPCStub() + mock_args = [] + datastore_api = self._make_one(stub, connection=conn, + mock_args=mock_args) + self.assertIs(datastore_api._stub, stub) + + self.assertEqual(mock_args, [( + conn.credentials, + conn.USER_AGENT, + datastore_pb2_grpc.DatastoreStub, + conn.host, + )]) + + def test_constructor_insecure(self): + from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc + + conn = mock.Mock( + credentials=object(), + host='CURR_HOST:1234', + spec=['credentials', 'host'], + ) + + stub = _GRPCStub() + mock_args = [] + datastore_api = self._make_one(stub, connection=conn, + secure=False, + mock_args=mock_args) + self.assertIs(datastore_api._stub, stub) + + self.assertEqual(mock_args, [( + datastore_pb2_grpc.DatastoreStub, + conn.host, + )]) + + def test_lookup(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + result = datastore_api.lookup(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 
'Lookup')]) + + def test_run_query(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + result = datastore_api.run_query(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'RunQuery')]) + + def _run_query_failure_helper(self, exc, err_class): + stub = _GRPCStub(side_effect=exc) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + with self.assertRaises(err_class): + datastore_api.run_query(project, request_pb) + + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'RunQuery')]) + + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') + def test_run_query_invalid_argument(self): + from grpc import StatusCode + from grpc._channel import _RPCState + from google.cloud.exceptions import BadRequest + from google.cloud.exceptions import GrpcRendezvous + + details = ('Cannot have inequality filters on multiple ' + 'properties: [created, priority]') + exc_state = _RPCState((), None, None, + StatusCode.INVALID_ARGUMENT, details) + exc = GrpcRendezvous(exc_state, None, None, None) + self._run_query_failure_helper(exc, BadRequest) + + def test_begin_transaction(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + result = datastore_api.begin_transaction(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual( + stub.method_calls, + [(request_pb, 'BeginTransaction')]) + + def test_commit_success(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + result = datastore_api.commit(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'Commit')]) + + def test_rollback(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + result = datastore_api.rollback(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual(stub.method_calls, + [(request_pb, 'Rollback')]) + + def test_allocate_ids(self): + return_val = object() + stub = _GRPCStub(return_val) + datastore_api = self._make_one(stub=stub) + + request_pb = mock.Mock(project_id=None, spec=['project_id']) + project = 'PROJECT' + result = datastore_api.allocate_ids(project, request_pb) + self.assertIs(result, return_val) + self.assertEqual(request_pb.project_id, project) + self.assertEqual( + stub.method_calls, + [(request_pb, 'AllocateIds')]) + + +class _GRPCStub(object): + + def __init__(self, return_val=None, side_effect=Exception): + self.return_val = return_val + self.side_effect = side_effect + self.method_calls = [] + + def _method(self, request_pb, name): + self.method_calls.append((request_pb, name)) + if self.side_effect is Exception: + return self.return_val + else: + raise self.side_effect + + def Lookup(self, request_pb): 
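+        # Each fake stub method records the call and defers to ``_method``,
+        # which returns the canned value or raises ``side_effect``.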
+ return self._method(request_pb, 'Lookup') + + def RunQuery(self, request_pb): + return self._method(request_pb, 'RunQuery') + + def BeginTransaction(self, request_pb): + return self._method(request_pb, 'BeginTransaction') + + def Commit(self, request_pb): + return self._method(request_pb, 'Commit') + + def Rollback(self, request_pb): + return self._method(request_pb, 'Rollback') + + def AllocateIds(self, request_pb): + return self._method(request_pb, 'AllocateIds') diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 7e47e7c2f65a..bd2c7ba98644 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -16,8 +16,6 @@ import mock -from google.cloud.datastore._http import _HAVE_GRPC - class Test_DatastoreAPIOverHttp(unittest.TestCase): @@ -110,253 +108,6 @@ def test__request_not_200(self): [{'method': METHOD, 'project': PROJECT}]) -@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') -class Test__grpc_catch_rendezvous(unittest.TestCase): - - def _call_fut(self): - from google.cloud.datastore._http import _grpc_catch_rendezvous - - return _grpc_catch_rendezvous() - - @staticmethod - def _fake_method(exc, result=None): - if exc is None: - return result - else: - raise exc - - def test_success(self): - expected = object() - with self._call_fut(): - result = self._fake_method(None, expected) - self.assertIs(result, expected) - - def test_failure_aborted(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import Conflict - from google.cloud.exceptions import GrpcRendezvous - - details = 'Bad things.' - exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) - exc = GrpcRendezvous(exc_state, None, None, None) - with self.assertRaises(Conflict): - with self._call_fut(): - self._fake_method(exc) - - def test_failure_invalid_argument(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import BadRequest - from google.cloud.exceptions import GrpcRendezvous - - details = ('Cannot have inequality filters on multiple ' - 'properties: [created, priority]') - exc_state = _RPCState((), None, None, - StatusCode.INVALID_ARGUMENT, details) - exc = GrpcRendezvous(exc_state, None, None, None) - with self.assertRaises(BadRequest): - with self._call_fut(): - self._fake_method(exc) - - def test_failure_cancelled(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import GrpcRendezvous - - exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) - exc = GrpcRendezvous(exc_state, None, None, None) - with self.assertRaises(GrpcRendezvous): - with self._call_fut(): - self._fake_method(exc) - - def test_commit_failure_non_grpc_err(self): - exc = RuntimeError('Not a gRPC error') - with self.assertRaises(RuntimeError): - with self._call_fut(): - self._fake_method(exc) - - -class Test_DatastoreAPIOverGRPC(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import _DatastoreAPIOverGRPC - - return _DatastoreAPIOverGRPC - - def _make_one(self, stub, connection=None, secure=True, mock_args=None): - if connection is None: - connection = _Connection(None) - connection.credentials = object() - connection.host = 'CURR_HOST' - - if mock_args is None: - mock_args = [] - - def mock_make_stub(*args): - mock_args.append(args) - return stub - - if secure: - patch 
= mock.patch( - 'google.cloud.datastore._http.make_secure_stub', - new=mock_make_stub) - else: - patch = mock.patch( - 'google.cloud.datastore._http.make_insecure_stub', - new=mock_make_stub) - - with patch: - return self._get_target_class()(connection, secure) - - def test_constructor(self): - from google.cloud.datastore import _http as MUT - - conn = _Connection(None) - conn.credentials = object() - conn.host = 'CURR_HOST' - - stub = _GRPCStub() - mock_args = [] - datastore_api = self._make_one(stub, connection=conn, - mock_args=mock_args) - self.assertIs(datastore_api._stub, stub) - - self.assertEqual(mock_args, [( - conn.credentials, - conn.USER_AGENT, - MUT.datastore_pb2_grpc.DatastoreStub, - conn.host, - )]) - - def test_constructor_insecure(self): - from google.cloud.datastore import _http as MUT - - conn = _Connection(None) - conn.credentials = object() - conn.host = 'CURR_HOST:1234' - - stub = _GRPCStub() - mock_args = [] - datastore_api = self._make_one(stub, connection=conn, - secure=False, - mock_args=mock_args) - self.assertIs(datastore_api._stub, stub) - - self.assertEqual(mock_args, [( - MUT.datastore_pb2_grpc.DatastoreStub, - conn.host, - )]) - - def test_lookup(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - result = datastore_api.lookup(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Lookup')]) - - def test_run_query(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - result = datastore_api.run_query(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'RunQuery')]) - - def _run_query_failure_helper(self, exc, err_class): - stub = _GRPCStub(side_effect=exc) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - with self.assertRaises(err_class): - datastore_api.run_query(project, request_pb) - - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'RunQuery')]) - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') - def test_run_query_invalid_argument(self): - from grpc import StatusCode - from grpc._channel import _RPCState - from google.cloud.exceptions import BadRequest - from google.cloud.exceptions import GrpcRendezvous - - details = ('Cannot have inequality filters on multiple ' - 'properties: [created, priority]') - exc_state = _RPCState((), None, None, - StatusCode.INVALID_ARGUMENT, details) - exc = GrpcRendezvous(exc_state, None, None, None) - self._run_query_failure_helper(exc, BadRequest) - - def test_begin_transaction(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - result = datastore_api.begin_transaction(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual( - stub.method_calls, - [(request_pb, 'BeginTransaction')]) - - def test_commit_success(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - result = datastore_api.commit(project, request_pb) - 
self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Commit')]) - - def test_rollback(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - result = datastore_api.rollback(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Rollback')]) - - def test_allocate_ids(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) - - request_pb = _RequestPB() - project = 'PROJECT' - result = datastore_api.allocate_ids(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual( - stub.method_calls, - [(request_pb, 'AllocateIds')]) - - class TestConnection(unittest.TestCase): @staticmethod @@ -1147,40 +898,3 @@ def __init__(self, api_url): def build_api_url(self, **kwargs): self.build_kwargs.append(kwargs) return self.api_url - - -class _GRPCStub(object): - - def __init__(self, return_val=None, side_effect=Exception): - self.return_val = return_val - self.side_effect = side_effect - self.method_calls = [] - - def _method(self, request_pb, name): - self.method_calls.append((request_pb, name)) - if self.side_effect is Exception: - return self.return_val - else: - raise self.side_effect - - def Lookup(self, request_pb): - return self._method(request_pb, 'Lookup') - - def RunQuery(self, request_pb): - return self._method(request_pb, 'RunQuery') - - def BeginTransaction(self, request_pb): - return self._method(request_pb, 'BeginTransaction') - - def Commit(self, request_pb): - return self._method(request_pb, 'Commit') - - def Rollback(self, request_pb): - return self._method(request_pb, 'Rollback') - - def AllocateIds(self, request_pb): - return self._method(request_pb, 'AllocateIds') - - -class _RequestPB(object): - project_id = None From 386b6df42a5151dcc3922272d4bb10b6bcdde11d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 22 Feb 2017 07:40:10 -0800 Subject: [PATCH 076/611] Adding GCCL header for HTTP APIs. 
(#3046)
---
 .../google/cloud/datastore/__init__.py                     | 3 +++
 .../google-cloud-datastore/google/cloud/datastore/_http.py | 7 +++----
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py
index 89e0eaaa31ea..e09b2101e9ca 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py
@@ -54,6 +54,9 @@
 """
 
+from pkg_resources import get_distribution
+__version__ = get_distribution('google-cloud-datastore').version
+
 from google.cloud.datastore.batch import Batch
 from google.cloud.datastore.client import Client
 from google.cloud.datastore.entity import Entity
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index edef8a9f30a4..a4c0a3f8c906 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -15,7 +15,6 @@
 """Connections to Google Cloud Datastore API servers."""
 
 import os
-from pkg_resources import get_distribution
 
 from google.rpc import status_pb2
 
@@ -24,6 +23,8 @@
 from google.cloud.environment_vars import GCD_HOST
 from google.cloud import exceptions
 from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2
+
+from google.cloud.datastore import __version__
 try:
     from google.cloud.datastore._gax import _DatastoreAPIOverGRPC
     _HAVE_GRPC = True
@@ -37,9 +38,7 @@
 _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False)
 _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC
 
-_DATASTORE_DIST = get_distribution('google-cloud-datastore')
-_CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(
-    _DATASTORE_DIST.version)
+_CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(__version__)
 
 
 class _DatastoreAPIOverHttp(object):

From 449c52c5c9eb25e1d03189859da63664086295a4 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Thu, 23 Feb 2017 16:29:23 -0800
Subject: [PATCH 077/611] Making datastore Connection.run_query() return
 low-level protobuf.

Towards #2746 (as is #3064). This approach is to slowly transition
from our current approach to use the GAPIC generated surface.

These unit tests weren't so bad to update. I did "editorialize"
as I went, downgrading constants to just variables, renaming
for PEP8, etc.
---
 .../google/cloud/datastore/_http.py           |  14 +-
 .../google/cloud/datastore/query.py           |  34 ++--
 .../unit_tests/test__http.py                  | 166 ++++++++++--------
 .../unit_tests/test_query.py                  | 142 +++++++++------
 4 files changed, 192 insertions(+), 164 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index a4c0a3f8c906..ac9059ff0340 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -379,10 +379,8 @@ def run_query(self, project, query_pb, namespace=None,
             the given transaction. Incompatible with
             ``eventual==True``.
 
-        :rtype: tuple
-        :returns: Four-tuple containing the entities returned,
-                  the end cursor of the query, a ``more_results``
-                  enum and a count of the number of skipped results.
+        :rtype: :class:`.datastore_pb2.RunQueryResponse`
+        :returns: The protobuf response from a ``runQuery`` request.
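+
+        A sketch of consuming the raw response, assuming ``query_pb`` is a
+        prepared :class:`.query_pb2.Query`::
+
+            response = conn.run_query('my-project', query_pb)
+            entity_pbs = [r.entity for r in response.batch.entity_results]
+            cursor = response.batch.end_cursor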
""" request = _datastore_pb2.RunQueryRequest() _set_read_options(request, eventual, transaction_id) @@ -391,13 +389,7 @@ def run_query(self, project, query_pb, namespace=None, request.partition_id.namespace_id = namespace request.query.CopyFrom(query_pb) - response = self._datastore_api.run_query(project, request) - return ( - [e.entity for e in response.batch.entity_results], - response.batch.end_cursor, # Assume response always has cursor. - response.batch.more_results, - response.batch.skipped_results, - ) + return self._datastore_api.run_query(project, request) def begin_transaction(self, project): """Begin a transaction. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index e8989a41a9dd..9ee565f2f2f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -441,43 +441,33 @@ def _build_protobuf(self): return pb - def _process_query_results(self, entity_pbs, cursor_as_bytes, - more_results_enum, skipped_results): + def _process_query_results(self, response_pb): """Process the response from a datastore query. - :type entity_pbs: iterable - :param entity_pbs: The entities returned in the current page. - - :type cursor_as_bytes: bytes - :param cursor_as_bytes: The end cursor of the query. - - :type more_results_enum: - :class:`.query_pb2.QueryResultBatch.MoreResultsType` - :param more_results_enum: Enum indicating if there are more results. - - :type skipped_results: int - :param skipped_results: The number of skipped results. + :type response_pb: :class:`.datastore_pb2.RunQueryResponse` + :param response_pb: The protobuf response from a ``runQuery`` request. :rtype: iterable :returns: The next page of entity results. :raises ValueError: If ``more_results`` is an unexpected value. """ - self._skipped_results = skipped_results + self._skipped_results = response_pb.batch.skipped_results - if cursor_as_bytes == b'': # Empty-value for bytes. + if response_pb.batch.end_cursor == b'': # Empty-value for bytes. self.next_page_token = None else: - self.next_page_token = base64.urlsafe_b64encode(cursor_as_bytes) + self.next_page_token = base64.urlsafe_b64encode( + response_pb.batch.end_cursor) self._end_cursor = None - if more_results_enum == _NOT_FINISHED: + if response_pb.batch.more_results == _NOT_FINISHED: self._more_results = True - elif more_results_enum in _FINISHED: + elif response_pb.batch.more_results in _FINISHED: self._more_results = False else: raise ValueError('Unexpected value returned for `more_results`.') - return entity_pbs + return [result.entity for result in response_pb.batch.entity_results] def _next_page(self): """Get the next page in the iterator. 
@@ -492,13 +482,13 @@ def _next_page(self): pb = self._build_protobuf() transaction = self.client.current_transaction - query_results = self.client._connection.run_query( + response_pb = self.client._connection.run_query( query_pb=pb, project=self._query.project, namespace=self._query.namespace, transaction_id=transaction and transaction.id, ) - entity_pbs = self._process_query_results(*query_results) + entity_pbs = self._process_query_results(response_pb) return Page(self, entity_pbs, self._item_to_value) diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index ec4867f7c893..04087c7122c6 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -150,7 +150,7 @@ def _make_one(self, client, use_grpc=False): new=use_grpc): return self._get_target_class()(client) - def _verifyProtobufCall(self, called_with, URI, conn): + def _verify_protobuf_call(self, called_with, URI, conn): from google.cloud import _http as connection_module from google.cloud.datastore._http import _CLIENT_INFO @@ -272,7 +272,7 @@ def test_lookup_single_key_empty_response(self): self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -301,7 +301,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -342,7 +342,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -376,7 +376,7 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(found.key.path[0].kind, 'Kind') self.assertEqual(found.key.path[0].id, 1234) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -405,7 +405,7 @@ def test_lookup_multiple_keys_empty_response(self): self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -440,7 +440,7 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual([missed.key for missed in missing], [key_pb1, key_pb2]) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -475,7 +475,7 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(len(missing), 0) self.assertEqual([def_key for def_key in deferred], [key_pb1, key_pb2]) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) 
self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') expected_headers = { @@ -497,34 +497,35 @@ def test_run_query_w_eventual_no_transaction(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.grpc.datastore.v1 import query_pb2 - PROJECT = 'PROJECT' - KIND = 'Nonesuch' - CURSOR = b'\x00' - q_pb = self._make_query_pb(KIND) + project = 'PROJECT' + kind = 'Nonesuch' + cursor = b'\x00' + q_pb = self._make_query_pb(kind) rsp_pb = datastore_pb2.RunQueryResponse() - rsp_pb.batch.end_cursor = CURSOR + rsp_pb.batch.end_cursor = cursor no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.run_query(project, q_pb, eventual=True) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':runQuery', + project + ':runQuery', ]) - pbs, end, more, skipped = conn.run_query(PROJECT, q_pb, - eventual=True) - self.assertEqual(pbs, []) - self.assertEqual(end, CURSOR) - self.assertTrue(more) - self.assertEqual(skipped, 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) - rq_class = datastore_pb2.RunQueryRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') self.assertEqual(request.query, q_pb) @@ -536,42 +537,44 @@ def test_run_query_wo_eventual_w_transaction(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.grpc.datastore.v1 import query_pb2 - PROJECT = 'PROJECT' - KIND = 'Nonesuch' - CURSOR = b'\x00' - TRANSACTION = b'TRANSACTION' - q_pb = self._make_query_pb(KIND) + project = 'PROJECT' + kind = 'Nonesuch' + cursor = b'\x00' + transaction = b'TRANSACTION' + q_pb = self._make_query_pb(kind) rsp_pb = datastore_pb2.RunQueryResponse() - rsp_pb.batch.end_cursor = CURSOR + rsp_pb.batch.end_cursor = cursor no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.run_query( + project, q_pb, transaction_id=transaction) + + # Check the result and verify the callers. 
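+        # Protobuf message equality compares field values, so the parsed
+        # response should equal the ``rsp_pb`` serialized above.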
+ self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':runQuery', + project + ':runQuery', ]) - pbs, end, more, skipped = conn.run_query( - PROJECT, q_pb, transaction_id=TRANSACTION) - self.assertEqual(pbs, []) - self.assertEqual(end, CURSOR) - self.assertTrue(more) - self.assertEqual(skipped, 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) - rq_class = datastore_pb2.RunQueryRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') self.assertEqual(request.query, q_pb) self.assertEqual( request.read_options.read_consistency, datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED) - self.assertEqual(request.read_options.transaction, TRANSACTION) + self.assertEqual(request.read_options.transaction, transaction) def test_run_query_w_eventual_and_transaction(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 @@ -595,33 +598,35 @@ def test_run_query_wo_namespace_empty_result(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.grpc.datastore.v1 import query_pb2 - PROJECT = 'PROJECT' - KIND = 'Nonesuch' - CURSOR = b'\x00' - q_pb = self._make_query_pb(KIND) + project = 'PROJECT' + kind = 'Nonesuch' + cursor = b'\x00' + q_pb = self._make_query_pb(kind) rsp_pb = datastore_pb2.RunQueryResponse() - rsp_pb.batch.end_cursor = CURSOR + rsp_pb.batch.end_cursor = cursor no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.run_query(project, q_pb) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':runQuery', + project + ':runQuery', ]) - pbs, end, more, skipped = conn.run_query(PROJECT, q_pb) - self.assertEqual(pbs, []) - self.assertEqual(end, CURSOR) - self.assertTrue(more) - self.assertEqual(skipped, 0) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) - rq_class = datastore_pb2.RunQueryRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') self.assertEqual(request.query, q_pb) @@ -629,32 +634,39 @@ def test_run_query_wo_namespace_empty_result(self): def test_run_query_w_namespace_nonempty_result(self): from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.grpc.datastore.v1 import entity_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 - PROJECT = 'PROJECT' - KIND = 'Kind' + project = 'PROJECT' + kind = 'Kind' entity = entity_pb2.Entity() - q_pb = self._make_query_pb(KIND) + q_pb = self._make_query_pb(kind) rsp_pb = datastore_pb2.RunQueryResponse() rsp_pb.batch.entity_results.add(entity=entity) - rsp_pb.batch.entity_result_type = 1 # FULL - rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + rsp_pb.batch.more_results = query_pb2.QueryResultBatch.NO_MORE_RESULTS + + # Create mock HTTP and client with response. 
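+        # The fake ``Http`` replies with the canned serialized protobuf and
+        # records the request in ``_called_with`` for verification below.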
http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + namespace = 'NS' + response = conn.run_query(project, q_pb, namespace=namespace) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + cw = http._called_with + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':runQuery', + project + ':runQuery', ]) - pbs = conn.run_query(PROJECT, q_pb, 'NS')[0] - self.assertEqual(len(pbs), 1) - cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) - rq_class = datastore_pb2.RunQueryRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) - self.assertEqual(request.partition_id.namespace_id, 'NS') + self.assertEqual(request.partition_id.namespace_id, namespace) self.assertEqual(request.query, q_pb) def test_begin_transaction(self): @@ -675,7 +687,7 @@ def test_begin_transaction(self): ]) self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.BeginTransactionRequest request = rq_class() request.ParseFromString(cw['body']) @@ -719,7 +731,7 @@ def mock_parse(response): self.assertIs(result, expected_result) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -767,7 +779,7 @@ def mock_parse(response): self.assertIs(result, expected_result) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -794,7 +806,7 @@ def test_rollback_ok(self): ]) self.assertIsNone(conn.rollback(PROJECT, TRANSACTION)) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.RollbackRequest request = rq_class() request.ParseFromString(cw['body']) @@ -816,7 +828,7 @@ def test_allocate_ids_empty(self): ]) self.assertEqual(conn.allocate_ids(PROJECT, []), []) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.AllocateIdsRequest request = rq_class() request.ParseFromString(cw['body']) @@ -849,7 +861,7 @@ def test_allocate_ids_non_empty(self): self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs), after_key_pbs) cw = http._called_with - self._verifyProtobufCall(cw, URI, conn) + self._verify_protobuf_call(cw, URI, conn) rq_class = datastore_pb2.AllocateIdsRequest request = rq_class() request.ParseFromString(cw['body']) diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index f3ed774db336..a02e166aba33 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -28,13 +28,13 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def _makeClient(self, connection=None): + def _make_client(self, connection=None): if connection is None: connection = _Connection() return _Client(self._PROJECT, connection) def test_ctor_defaults(self): - client = 
self._makeClient() + client = self._make_client() query = self._make_one(client) self.assertIs(query._client, client) self.assertEqual(query.project, client.project) @@ -52,7 +52,7 @@ def test_ctor_explicit(self): _PROJECT = 'OTHER_PROJECT' _KIND = 'KIND' _NAMESPACE = 'OTHER_NAMESPACE' - client = self._makeClient() + client = self._make_client() ancestor = Key('ANCESTOR', 123, project=_PROJECT) FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)] PROJECTION = ['foo', 'bar', 'baz'] @@ -81,26 +81,26 @@ def test_ctor_explicit(self): def test_ctor_bad_projection(self): BAD_PROJECTION = object() - self.assertRaises(TypeError, self._make_one, self._makeClient(), + self.assertRaises(TypeError, self._make_one, self._make_client(), projection=BAD_PROJECTION) def test_ctor_bad_order(self): BAD_ORDER = object() - self.assertRaises(TypeError, self._make_one, self._makeClient(), + self.assertRaises(TypeError, self._make_one, self._make_client(), order=BAD_ORDER) def test_ctor_bad_distinct_on(self): BAD_DISTINCT_ON = object() - self.assertRaises(TypeError, self._make_one, self._makeClient(), + self.assertRaises(TypeError, self._make_one, self._make_client(), distinct_on=BAD_DISTINCT_ON) def test_ctor_bad_filters(self): FILTERS_CANT_UNPACK = [('one', 'two')] - self.assertRaises(ValueError, self._make_one, self._makeClient(), + self.assertRaises(ValueError, self._make_one, self._make_client(), filters=FILTERS_CANT_UNPACK) def test_namespace_setter_w_non_string(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) def _assign(val): query.namespace = val @@ -109,12 +109,12 @@ def _assign(val): def test_namespace_setter(self): _NAMESPACE = 'OTHER_NAMESPACE' - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.namespace = _NAMESPACE self.assertEqual(query.namespace, _NAMESPACE) def test_kind_setter_w_non_string(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) def _assign(val): query.kind = val @@ -123,21 +123,21 @@ def _assign(val): def test_kind_setter_wo_existing(self): _KIND = 'KIND' - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.kind = _KIND self.assertEqual(query.kind, _KIND) def test_kind_setter_w_existing(self): _KIND_BEFORE = 'KIND_BEFORE' _KIND_AFTER = 'KIND_AFTER' - query = self._make_one(self._makeClient(), kind=_KIND_BEFORE) + query = self._make_one(self._make_client(), kind=_KIND_BEFORE) self.assertEqual(query.kind, _KIND_BEFORE) query.kind = _KIND_AFTER self.assertEqual(query.project, self._PROJECT) self.assertEqual(query.kind, _KIND_AFTER) def test_ancestor_setter_w_non_key(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) def _assign(val): query.ancestor = val @@ -150,7 +150,7 @@ def test_ancestor_setter_w_key(self): _NAME = u'NAME' key = Key('KIND', 123, project=self._PROJECT) - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.add_filter('name', '=', _NAME) query.ancestor = key self.assertEqual(query.ancestor.path, key.path) @@ -159,22 +159,22 @@ def test_ancestor_deleter_w_key(self): from google.cloud.datastore.key import Key key = Key('KIND', 123, project=self._PROJECT) - query = self._make_one(client=self._makeClient(), ancestor=key) + query = self._make_one(client=self._make_client(), ancestor=key) del query.ancestor self.assertIsNone(query.ancestor) def test_add_filter_setter_w_unknown_operator(self): - query = 
self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) self.assertRaises(ValueError, query.add_filter, 'firstname', '~~', 'John') def test_add_filter_w_known_operator(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.add_filter('firstname', '=', u'John') self.assertEqual(query.filters, [('firstname', '=', u'John')]) def test_add_filter_w_all_operators(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.add_filter('leq_prop', '<=', u'val1') query.add_filter('geq_prop', '>=', u'val2') query.add_filter('lt_prop', '<', u'val3') @@ -190,7 +190,7 @@ def test_add_filter_w_all_operators(self): def test_add_filter_w_known_operator_and_entity(self): from google.cloud.datastore.entity import Entity - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) other = Entity() other['firstname'] = u'John' other['lastname'] = u'Smith' @@ -198,7 +198,7 @@ def test_add_filter_w_known_operator_and_entity(self): self.assertEqual(query.filters, [('other', '=', other)]) def test_add_filter_w_whitespace_property_name(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) PROPERTY_NAME = ' property with lots of space ' query.add_filter(PROPERTY_NAME, '=', u'John') self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')]) @@ -206,7 +206,7 @@ def test_add_filter_w_whitespace_property_name(self): def test_add_filter___key__valid_key(self): from google.cloud.datastore.key import Key - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) key = Key('Foo', project=self._PROJECT) query.add_filter('__key__', '=', key) self.assertEqual(query.filters, [('__key__', '=', key)]) @@ -215,47 +215,47 @@ def test_filter___key__not_equal_operator(self): from google.cloud.datastore.key import Key key = Key('Foo', project=self._PROJECT) - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.add_filter('__key__', '<', key) self.assertEqual(query.filters, [('__key__', '<', key)]) def test_filter___key__invalid_value(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) self.assertRaises(ValueError, query.add_filter, '__key__', '=', None) def test_projection_setter_empty(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.projection = [] self.assertEqual(query.projection, []) def test_projection_setter_string(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.projection = 'field1' self.assertEqual(query.projection, ['field1']) def test_projection_setter_non_empty(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.projection = ['field1', 'field2'] self.assertEqual(query.projection, ['field1', 'field2']) def test_projection_setter_multiple_calls(self): _PROJECTION1 = ['field1', 'field2'] _PROJECTION2 = ['field3'] - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.projection = _PROJECTION1 self.assertEqual(query.projection, _PROJECTION1) query.projection = _PROJECTION2 self.assertEqual(query.projection, _PROJECTION2) def test_keys_only(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.keys_only() self.assertEqual(query.projection, ['__key__']) def 
test_key_filter_defaults(self): from google.cloud.datastore.key import Key - client = self._makeClient() + client = self._make_client() query = self._make_one(client) self.assertEqual(query.filters, []) key = Key('Kind', 1234, project='project') @@ -265,7 +265,7 @@ def test_key_filter_defaults(self): def test_key_filter_explicit(self): from google.cloud.datastore.key import Key - client = self._makeClient() + client = self._make_client() query = self._make_one(client) self.assertEqual(query.filters, []) key = Key('Kind', 1234, project='project') @@ -273,44 +273,44 @@ def test_key_filter_explicit(self): self.assertEqual(query.filters, [('__key__', '>', key)]) def test_order_setter_empty(self): - query = self._make_one(self._makeClient(), order=['foo', '-bar']) + query = self._make_one(self._make_client(), order=['foo', '-bar']) query.order = [] self.assertEqual(query.order, []) def test_order_setter_string(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.order = 'field' self.assertEqual(query.order, ['field']) def test_order_setter_single_item_list_desc(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.order = ['-field'] self.assertEqual(query.order, ['-field']) def test_order_setter_multiple(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.order = ['foo', '-bar'] self.assertEqual(query.order, ['foo', '-bar']) def test_distinct_on_setter_empty(self): - query = self._make_one(self._makeClient(), distinct_on=['foo', 'bar']) + query = self._make_one(self._make_client(), distinct_on=['foo', 'bar']) query.distinct_on = [] self.assertEqual(query.distinct_on, []) def test_distinct_on_setter_string(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.distinct_on = 'field1' self.assertEqual(query.distinct_on, ['field1']) def test_distinct_on_setter_non_empty(self): - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.distinct_on = ['field1', 'field2'] self.assertEqual(query.distinct_on, ['field1', 'field2']) def test_distinct_on_multiple_calls(self): _DISTINCT_ON1 = ['field1', 'field2'] _DISTINCT_ON2 = ['field3'] - query = self._make_one(self._makeClient()) + query = self._make_one(self._make_client()) query.distinct_on = _DISTINCT_ON1 self.assertEqual(query.distinct_on, _DISTINCT_ON1) query.distinct_on = _DISTINCT_ON2 @@ -320,7 +320,7 @@ def test_fetch_defaults_w_client_attr(self): from google.cloud.datastore.query import Iterator connection = _Connection() - client = self._makeClient(connection) + client = self._make_client(connection) query = self._make_one(client) iterator = query.fetch() @@ -334,8 +334,8 @@ def test_fetch_w_explicit_client(self): from google.cloud.datastore.query import Iterator connection = _Connection() - client = self._makeClient(connection) - other_client = self._makeClient(connection) + client = self._make_client(connection) + other_client = self._make_client(connection) query = self._make_one(client) iterator = query.fetch(limit=7, offset=8, client=other_client) self.assertIsInstance(iterator, Iterator) @@ -443,15 +443,17 @@ def test__process_query_results(self): end_cursor='abcd') self.assertIsNotNone(iterator._end_cursor) - entity_pbs = object() + entity_pbs = [ + _make_entity('Hello', 9998, 'PRAHJEKT'), + ] cursor_as_bytes = b'\x9ai\xe7' cursor = b'mmnn' skipped_results = 4 more_results_enum = 
query_pb2.QueryResultBatch.NOT_FINISHED - result = iterator._process_query_results( - entity_pbs, cursor_as_bytes, - more_results_enum, skipped_results) - self.assertIs(result, entity_pbs) + response_pb = _make_query_response( + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results) + result = iterator._process_query_results(response_pb) + self.assertEqual(result, entity_pbs) self.assertEqual(iterator._skipped_results, skipped_results) self.assertEqual(iterator.next_page_token, cursor) @@ -464,14 +466,16 @@ def test__process_query_results_done(self): end_cursor='abcd') self.assertIsNotNone(iterator._end_cursor) - entity_pbs = object() + entity_pbs = [ + _make_entity('World', 1234, 'PROJECT'), + ] cursor_as_bytes = b'' skipped_results = 44 more_results_enum = query_pb2.QueryResultBatch.NO_MORE_RESULTS - result = iterator._process_query_results( - entity_pbs, cursor_as_bytes, - more_results_enum, skipped_results) - self.assertIs(result, entity_pbs) + response_pb = _make_query_response( + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results) + result = iterator._process_query_results(response_pb) + self.assertEqual(result, entity_pbs) self.assertEqual(iterator._skipped_results, skipped_results) self.assertIsNone(iterator.next_page_token) @@ -480,9 +484,10 @@ def test__process_query_results_done(self): def test__process_query_results_bad_enum(self): iterator = self._make_one(None, None) more_results_enum = 999 + response_pb = _make_query_response( + [], b'', more_results_enum, 0) with self.assertRaises(ValueError): - iterator._process_query_results( - None, b'', more_results_enum, None) + iterator._process_query_results(response_pb) def test__next_page(self): from google.cloud.iterator import Page @@ -491,7 +496,7 @@ def test__next_page(self): connection = _Connection() more_enum = query_pb2.QueryResultBatch.NOT_FINISHED - result = ([], b'', more_enum, 0) + result = _make_query_response([], b'', more_enum, 0) connection._results = [result] project = 'prujekt' client = _Client(project, connection) @@ -695,3 +700,32 @@ def __init__(self, project, connection, namespace=None): @property def current_transaction(self): pass + + +def _make_entity(kind, id_, project): + from google.cloud.grpc.datastore.v1 import entity_pb2 + + key = entity_pb2.Key() + key.partition_id.project_id = project + elem = key.path.add() + elem.kind = kind + elem.id = id_ + return entity_pb2.Entity(key=key) + + +def _make_query_response( + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results): + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 + + return datastore_pb2.RunQueryResponse( + batch=query_pb2.QueryResultBatch( + skipped_results=skipped_results, + end_cursor=cursor_as_bytes, + more_results=more_results_enum, + entity_results=[ + query_pb2.EntityResult(entity=entity) + for entity in entity_pbs + ], + ), + ) From ed9dbb571c463e2e3046d4c9409eaa548d14a900 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 23 Feb 2017 15:17:58 -0800 Subject: [PATCH 078/611] Making datastore Connection.commit() return low-level protobuf. Towards #2746. This approach is to slowly transition from our current approach to use the GAPIC generated surface. It is unfortunately tangled quite a bit (partly because we may have too much mocked in the tests). 
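For reference, a minimal sketch (not part of this patch) of what the
relocated ``_parse_commit_response`` helper pulls out of a raw
``CommitResponse``; the ``Task`` key below is hypothetical:

    from google.cloud.grpc.datastore.v1 import datastore_pb2

    response = datastore_pb2.CommitResponse(index_updates=3)
    mut_result = response.mutation_results.add()
    mut_result.key.partition_id.project_id = 'my-project'
    mut_result.key.path.add(kind='Task', id=1234)

    index_updates = response.index_updates     # 3
    completed_keys = [mr.key for mr in response.mutation_results
                      if mr.HasField('key')]   # [the completed Task key]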
---
 .../google/cloud/datastore/_http.py       | 27 +-----
 .../google/cloud/datastore/batch.py       | 21 +++-
 .../unit_tests/test__http.py              | 96 ++++--------------
 .../unit_tests/test_batch.py              | 78 +++++++++++----
 .../unit_tests/test_client.py             | 21 +++-
 .../unit_tests/test_transaction.py        | 28 +++---
 6 files changed, 133 insertions(+), 138 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index ac9059ff0340..2f0b27bde952 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -426,10 +426,8 @@ def commit(self, project, request, transaction_id):

         This method will mutate ``request`` before using it.

-        :rtype: tuple
-        :returns: The pair of the number of index updates and a list of
-                  :class:`.entity_pb2.Key` for each incomplete key
-                  that was completed in the commit.
+        :rtype: :class:`.datastore_pb2.CommitResponse`
+        :returns: The protobuf response from a commit request.
         """
         if transaction_id:
             request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
@@ -437,8 +435,7 @@ def commit(self, project, request, transaction_id):
         else:
             request.mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL

-        response = self._datastore_api.commit(project, request)
-        return _parse_commit_response(response)
+        return self._datastore_api.commit(project, request)

     def rollback(self, project, transaction_id):
         """Rollback the connection's existing transaction.
@@ -508,21 +505,3 @@ def _add_keys_to_request(request_field_pb, key_pbs):
     """
     for key_pb in key_pbs:
         request_field_pb.add().CopyFrom(key_pb)
-
-
-def _parse_commit_response(commit_response_pb):
-    """Extract response data from a commit response.
-
-    :type commit_response_pb: :class:`.datastore_pb2.CommitResponse`
-    :param commit_response_pb: The protobuf response from a commit request.
-
-    :rtype: tuple
-    :returns: The pair of the number of index updates and a list of
-              :class:`.entity_pb2.Key` for each incomplete key
-              that was completed in the commit.
-    """
-    mut_results = commit_response_pb.mutation_results
-    index_updates = commit_response_pb.index_updates
-    completed_keys = [mut_result.key for mut_result in mut_results
-                      if mut_result.HasField('key')]  # Message field (Key)
-    return index_updates, completed_keys
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py
index 00854d2007b6..484af5a67c76 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py
@@ -238,8 +238,9 @@ def _commit(self):
         This is called by :meth:`commit`.
         """
         # NOTE: ``self._commit_request`` will be modified.
-        _, updated_keys = self._client._connection.commit(
+        commit_response_pb = self._client._connection.commit(
             self.project, self._commit_request, self._id)
+        _, updated_keys = _parse_commit_response(commit_response_pb)
         # If the back-end returns without error, we are guaranteed that
         # :meth:`Connection.commit` will return keys that match (length and
         # order) directly ``_partial_key_entities``.
@@ -311,3 +312,21 @@ def _assign_entity_to_pb(entity_pb, entity):
     bare_entity_pb = helpers.entity_to_protobuf(entity)
     bare_entity_pb.key.CopyFrom(bare_entity_pb.key)
     entity_pb.CopyFrom(bare_entity_pb)
+
+
+def _parse_commit_response(commit_response_pb):
+    """Extract response data from a commit response.
+
+    :type commit_response_pb: :class:`.datastore_pb2.CommitResponse`
+    :param commit_response_pb: The protobuf response from a commit request.
+
+    :rtype: tuple
+    :returns: The pair of the number of index updates and a list of
+              :class:`.entity_pb2.Key` for each incomplete key
+              that was completed in the commit.
+    """
+    mut_results = commit_response_pb.mutation_results
+    index_updates = commit_response_pb.index_updates
+    completed_keys = [mut_result.key for mut_result in mut_results
+                      if mut_result.HasField('key')]  # Message field (Key)
+    return index_updates, completed_keys
diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py
index 04087c7122c6..690cc45f325e 100644
--- a/packages/google-cloud-datastore/unit_tests/test__http.py
+++ b/packages/google-cloud-datastore/unit_tests/test__http.py
@@ -696,8 +696,8 @@ def test_commit_wo_transaction(self):
         from google.cloud.grpc.datastore.v1 import datastore_pb2
         from google.cloud.datastore.helpers import _new_value_pb

-        PROJECT = 'PROJECT'
-        key_pb = self._make_key_pb(PROJECT)
+        project = 'PROJECT'
+        key_pb = self._make_key_pb(project)
         rsp_pb = datastore_pb2.CommitResponse()
         req_pb = datastore_pb2.CommitRequest()
         mutation = req_pb.mutations.add()
@@ -708,44 +708,32 @@ def test_commit_wo_transaction(self):
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(_http=http, spec=['_http'])
         conn = self._make_one(client)
-        URI = '/'.join([
+        uri = '/'.join([
             conn.api_base_url,
             conn.API_VERSION,
             'projects',
-            PROJECT + ':commit',
+            project + ':commit',
         ])

-        # Set up mock for parsing the response.
-        expected_result = object()
-        _parsed = []
-
-        def mock_parse(response):
-            _parsed.append(response)
-            return expected_result
-
-        patch = mock.patch(
-            'google.cloud.datastore._http._parse_commit_response',
-            new=mock_parse)
-        with patch:
-            result = conn.commit(PROJECT, req_pb, None)
+        result = conn.commit(project, req_pb, None)
+        self.assertEqual(result, rsp_pb)

-        self.assertIs(result, expected_result)
+        # Verify the caller.
         cw = http._called_with
-        self._verify_protobuf_call(cw, URI, conn)
+        self._verify_protobuf_call(cw, uri, conn)
         rq_class = datastore_pb2.CommitRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
         self.assertEqual(request.transaction, b'')
         self.assertEqual(list(request.mutations), [mutation])
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
-        self.assertEqual(_parsed, [rsp_pb])

     def test_commit_w_transaction(self):
         from google.cloud.grpc.datastore.v1 import datastore_pb2
         from google.cloud.datastore.helpers import _new_value_pb

-        PROJECT = 'PROJECT'
-        key_pb = self._make_key_pb(PROJECT)
+        project = 'PROJECT'
+        key_pb = self._make_key_pb(project)
         rsp_pb = datastore_pb2.CommitResponse()
         req_pb = datastore_pb2.CommitRequest()
         mutation = req_pb.mutations.add()
@@ -756,37 +744,25 @@ def test_commit_w_transaction(self):
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(_http=http, spec=['_http'])
         conn = self._make_one(client)
-        URI = '/'.join([
+        uri = '/'.join([
             conn.api_base_url,
             conn.API_VERSION,
             'projects',
-            PROJECT + ':commit',
+            project + ':commit',
         ])

-        # Set up mock for parsing the response.
-        expected_result = object()
-        _parsed = []
+        result = conn.commit(project, req_pb, b'xact')
+        self.assertEqual(result, rsp_pb)

-        def mock_parse(response):
-            _parsed.append(response)
-            return expected_result
-
-        patch = mock.patch(
-            'google.cloud.datastore._http._parse_commit_response',
-            new=mock_parse)
-        with patch:
-            result = conn.commit(PROJECT, req_pb, b'xact')
-
-        self.assertIs(result, expected_result)
+        # Verify the caller.
         cw = http._called_with
-        self._verify_protobuf_call(cw, URI, conn)
+        self._verify_protobuf_call(cw, uri, conn)
         rq_class = datastore_pb2.CommitRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
         self.assertEqual(request.transaction, b'xact')
         self.assertEqual(list(request.mutations), [mutation])
         self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
-        self.assertEqual(_parsed, [rsp_pb])

     def test_rollback_ok(self):
         from google.cloud.grpc.datastore.v1 import datastore_pb2
@@ -870,46 +846,6 @@ def test_allocate_ids_non_empty(self):
         self.assertEqual(key_before, key_after)


-class Test__parse_commit_response(unittest.TestCase):
-
-    def _call_fut(self, commit_response_pb):
-        from google.cloud.datastore._http import _parse_commit_response
-
-        return _parse_commit_response(commit_response_pb)
-
-    def test_it(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import entity_pb2
-
-        index_updates = 1337
-        keys = [
-            entity_pb2.Key(
-                path=[
-                    entity_pb2.Key.PathElement(
-                        kind='Foo',
-                        id=1234,
-                    ),
-                ],
-            ),
-            entity_pb2.Key(
-                path=[
-                    entity_pb2.Key.PathElement(
-                        kind='Bar',
-                        name='baz',
-                    ),
-                ],
-            ),
-        ]
-        response = datastore_pb2.CommitResponse(
-            mutation_results=[
-                datastore_pb2.MutationResult(key=key) for key in keys
-            ],
-            index_updates=index_updates,
-        )
-        result = self._call_fut(response)
-        self.assertEqual(result, (index_updates, keys))
-
-
 class Http(object):

     _called_with = None
diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py
index 72614d070cba..f2c54c680bef 100644
--- a/packages/google-cloud-datastore/unit_tests/test_batch.py
+++ b/packages/google-cloud-datastore/unit_tests/test_batch.py
@@ -249,13 +249,13 @@ def test_commit_wrong_status(self):
         self.assertRaises(ValueError, batch.commit)

     def test_commit_w_partial_key_entities(self):
-        _PROJECT = 'PROJECT'
-        _NEW_ID = 1234
-        connection = _Connection(_NEW_ID)
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        new_id = 1234
+        connection = _Connection(new_id)
+        client = _Client(project, connection)
         batch = self._make_one(client)
         entity = _Entity({})
-        key = entity.key = _Key(_PROJECT)
+        key = entity.key = _Key(project)
         key._id = None
         batch._partial_key_entities.append(entity)

@@ -266,9 +266,9 @@ def test_commit_w_partial_key_entities(self):

         self.assertEqual(batch._status, batch._FINISHED)
         self.assertEqual(connection._committed,
-                         [(_PROJECT, batch._commit_request, None)])
+                         [(project, batch._commit_request, None)])
         self.assertFalse(entity.key.is_partial)
-        self.assertEqual(entity.key._id, _NEW_ID)
+        self.assertEqual(entity.key._id, new_id)

     def test_as_context_mgr_wo_error(self):
         _PROJECT = 'PROJECT'
@@ -369,30 +369,62 @@ def begin(self):
         self.assertEqual(client._batches, [])


-class _PathElementPB(object):
+class Test__parse_commit_response(unittest.TestCase):

-    def __init__(self, id_):
-        self.id = id_
+    def _call_fut(self, commit_response_pb):
+        from google.cloud.datastore.batch import _parse_commit_response

+        return _parse_commit_response(commit_response_pb)

-class _KeyPB(object):
+    def test_it(self):
+        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.grpc.datastore.v1 import entity_pb2

-    def __init__(self, id_):
-        self.path = [_PathElementPB(id_)]
+        index_updates = 1337
+        keys = [
+            entity_pb2.Key(
+                path=[
+                    entity_pb2.Key.PathElement(
+                        kind='Foo',
+                        id=1234,
+                    ),
+                ],
+            ),
+            entity_pb2.Key(
+                path=[
+                    entity_pb2.Key.PathElement(
+                        kind='Bar',
+                        name='baz',
+                    ),
+                ],
+            ),
+        ]
+        response = datastore_pb2.CommitResponse(
+            mutation_results=[
+                datastore_pb2.MutationResult(key=key) for key in keys
+            ],
+            index_updates=index_updates,
+        )
+        result = self._call_fut(response)
+        self.assertEqual(result, (index_updates, keys))


 class _Connection(object):

     _marker = object()
     _save_result = (False, None)

-    def __init__(self, *new_keys):
-        self._completed_keys = [_KeyPB(key) for key in new_keys]
+    def __init__(self, *new_key_ids):
+        from google.cloud.grpc.datastore.v1 import datastore_pb2
+
         self._committed = []
-        self._index_updates = 0
+        mutation_results = [
+            _make_mutation(key_id) for key_id in new_key_ids]
+        self._commit_response_pb = datastore_pb2.CommitResponse(
+            mutation_results=mutation_results)

     def commit(self, project, commit_request, transaction_id):
         self._committed.append((project, commit_request, transaction_id))
-        return self._index_updates, self._completed_keys
+        return self._commit_response_pb


 class _Entity(dict):
@@ -472,3 +504,15 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type):
                          mutation_type)

     return getattr(mutated_pb, mutation_type)
+
+
+def _make_mutation(id_):
+    from google.cloud.grpc.datastore.v1 import datastore_pb2
+    from google.cloud.grpc.datastore.v1 import entity_pb2
+
+    key = entity_pb2.Key()
+    key.partition_id.project_id = 'PROJECT'
+    elem = key.path.add()
+    elem.kind = 'Kind'
+    elem.id = id_
+    return datastore_pb2.MutationResult(key=key)
diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py
index e89acc9c0922..0102abd7ca02 100644
--- a/packages/google-cloud-datastore/unit_tests/test_client.py
+++ b/packages/google-cloud-datastore/unit_tests/test_client.py
@@ -568,7 +568,8 @@ def test_put_multi_no_batch_w_partial_key(self):

         creds = _make_credentials()
         client = self._make_one(credentials=creds)
-        client._connection._commit.append([_KeyPB(key)])
+        key_pb = _make_key(234)
+        client._connection._commit.append([key_pb])

         result = client.put_multi([entity])
         self.assertIsNone(result)
@@ -931,7 +932,6 @@ def __init__(self, credentials=None, http=None):
         self._commit = []
         self._alloc_cw = []
         self._alloc = []
-        self._index_updates = 0

     def _add_lookup_result(self, results=(), missing=(), deferred=()):
         self._lookup.append((list(results), list(missing), list(deferred)))
@@ -943,9 +943,13 @@ def lookup(self, project, key_pbs, eventual=False, transaction_id=None):
         return results, missing, deferred

     def commit(self, project, commit_request, transaction_id):
+        from google.cloud.grpc.datastore.v1 import datastore_pb2
+
         self._commit_cw.append((project, commit_request, transaction_id))
-        response, self._commit = self._commit[0], self._commit[1:]
-        return self._index_updates, response
+        keys, self._commit = self._commit[0], self._commit[1:]
+        mutation_results = [
+            datastore_pb2.MutationResult(key=key) for key in keys]
+        return datastore_pb2.CommitResponse(mutation_results=mutation_results)

     def allocate_ids(self, project, key_pbs):
         self._alloc_cw.append((project, key_pbs))
@@ -1058,3 +1062,12 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type):
                          mutation_type)

     return getattr(mutated_pb, mutation_type)
+
+
+def _make_key(id_):
+    from google.cloud.grpc.datastore.v1 import entity_pb2
+
+    key = entity_pb2.Key()
+    elem = key.path.add()
+    elem.id = id_
+    return key
diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py
index 7aa295bf7fca..4dec32420356 100644
--- a/packages/google-cloud-datastore/unit_tests/test_transaction.py
+++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py
@@ -126,12 +126,12 @@ def test_commit_no_partial_keys(self):
         self.assertIsNone(xact.id)

     def test_commit_w_partial_keys(self):
-        _PROJECT = 'PROJECT'
-        _KIND = 'KIND'
-        _ID = 123
-        connection = _Connection(234)
-        connection._completed_keys = [_make_key(_KIND, _ID, _PROJECT)]
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        kind = 'KIND'
+        id_ = 123
+        key = _make_key(kind, id_, project)
+        connection = _Connection(234, keys=[key])
+        client = _Client(project, connection)
         xact = self._make_one(client)
         xact.begin()
         entity = _Entity()
@@ -139,9 +139,9 @@ def test_commit_w_partial_keys(self):
         xact._commit_request = commit_request = object()
         xact.commit()
         self.assertEqual(connection._committed,
-                         (_PROJECT, commit_request, 234))
+                         (project, commit_request, 234))
         self.assertIsNone(xact.id)
-        self.assertEqual(entity.key.path, [{'kind': _KIND, 'id': _ID}])
+        self.assertEqual(entity.key.path, [{'kind': kind, 'id': id_}])

     def test_context_manager_no_raise(self):
         _PROJECT = 'PROJECT'
@@ -196,10 +196,14 @@ class _Connection(object):
     _committed = None
     _side_effect = None

-    def __init__(self, xact_id=123):
+    def __init__(self, xact_id=123, keys=()):
+        from google.cloud.grpc.datastore.v1 import datastore_pb2
+
         self._xact_id = xact_id
-        self._completed_keys = []
-        self._index_updates = 0
+        mutation_results = [
+            datastore_pb2.MutationResult(key=key) for key in keys]
+        self._commit_response_pb = datastore_pb2.CommitResponse(
+            mutation_results=mutation_results)

     def begin_transaction(self, project):
         self._begun = project
@@ -213,7 +217,7 @@ def rollback(self, project, transaction_id):

     def commit(self, project, commit_request, transaction_id):
         self._committed = (project, commit_request, transaction_id)
-        return self._index_updates, self._completed_keys
+        return self._commit_response_pb


 class _Entity(dict):

From 06d8e2b8ce0140f80cdffe0bf016a382700dbe61 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 24 Feb 2017 11:30:18 -0800
Subject: [PATCH 079/611] Upgrading all versions for umbrella release.
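The pins below all follow the same ``>= X.Y.Z, < X.(Y+1)dev`` pattern, which
accepts bugfix releases of a single minor line and nothing newer. A rough
illustration of what that means in practice (not part of the patch):

    from pkg_resources import Requirement

    req = Requirement.parse('google-cloud-core >= 0.23.1, < 0.24dev')
    assert '0.23.1' in req      # the pinned release itself
    assert '0.23.9' in req      # future bugfixes on the 0.23.x line
    assert '0.23.0' not in req  # older releases are rejected
    assert '0.24.0' not in req  # the next minor line is excluded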
---
 packages/google-cloud-datastore/setup.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py
index 5d448448efe3..170692218b4f 100644
--- a/packages/google-cloud-datastore/setup.py
+++ b/packages/google-cloud-datastore/setup.py
@@ -50,14 +50,14 @@


 REQUIREMENTS = [
-    'google-cloud-core >= 0.23.0, < 0.24dev',
+    'google-cloud-core >= 0.23.1, < 0.24dev',
     'grpcio >= 1.0.2, < 2.0dev',
-    'gapic-google-cloud-datastore-v1 >= 0.14.0, < 0.15dev',
+    'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev',
 ]

 setup(
     name='google-cloud-datastore',
-    version='0.22.1',
+    version='0.23.0',
     description='Python Client for Google Cloud Datastore',
     long_description=README,
     namespace_packages=[

From 08268c923030b0891d36a8ec5ff7f3b956269199 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 24 Feb 2017 12:54:22 -0800
Subject: [PATCH 080/611] Updating datastore imports grpc->proto.

Done via:

$ cd datastore/
$ git grep -l '\.grpc\.' | xargs sed -i s/'\.grpc\.'/'\.proto\.'/g
---
 .../google/cloud/datastore/_gax.py        |  2 +-
 .../google/cloud/datastore/_http.py       |  2 +-
 .../google/cloud/datastore/batch.py       |  2 +-
 .../google/cloud/datastore/helpers.py     |  2 +-
 .../google/cloud/datastore/key.py         |  2 +-
 .../google/cloud/datastore/query.py       |  2 +-
 .../unit_tests/test__gax.py               |  4 +-
 .../unit_tests/test__http.py              | 52 ++++++++--------
 .../unit_tests/test_batch.py              | 14 ++---
 .../unit_tests/test_client.py             | 12 ++--
 .../unit_tests/test_helpers.py            | 60 +++++++++----------
 .../unit_tests/test_key.py                |  2 +-
 .../unit_tests/test_query.py              | 26 ++++----
 .../unit_tests/test_transaction.py        |  6 +-
 14 files changed, 94 insertions(+), 94 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
index cd2739bd10ce..b29ce874f298 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
@@ -23,7 +23,7 @@
 from google.cloud._helpers import make_secure_stub
 from google.cloud import exceptions
-from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc
+from google.cloud.proto.datastore.v1 import datastore_pb2_grpc


 _GRPC_ERROR_MAPPING = {
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index 2f0b27bde952..4c384ad4d4ef 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -22,7 +22,7 @@
 from google.cloud.environment_vars import DISABLE_GRPC
 from google.cloud.environment_vars import GCD_HOST
 from google.cloud import exceptions
-from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2
+from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2
 from google.cloud.datastore import __version__

 try:
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py
index 484af5a67c76..33cc536cf273 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py
@@ -22,7 +22,7 @@
 """

 from google.cloud.datastore import helpers
-from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2
+from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2


 class Batch(object):
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
index 1fe5dd2864cb..36b11d0f8900 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
@@ -26,7 +26,7 @@
 from google.cloud._helpers import _datetime_to_pb_timestamp
 from google.cloud._helpers import _pb_timestamp_to_datetime
-from google.cloud.grpc.datastore.v1 import entity_pb2 as _entity_pb2
+from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2

 from google.cloud.datastore.entity import Entity
 from google.cloud.datastore.key import Key
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py
index 0af884c67301..5ae08c5642ca 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/key.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py
@@ -17,7 +17,7 @@
 import copy
 import six

-from google.cloud.grpc.datastore.v1 import entity_pb2 as _entity_pb2
+from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2


 class Key(object):
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py
index 9ee565f2f2f2..8342554b558f 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/query.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py
@@ -20,7 +20,7 @@
 from google.cloud.iterator import Iterator as BaseIterator
 from google.cloud.iterator import Page

-from google.cloud.grpc.datastore.v1 import query_pb2 as _query_pb2
+from google.cloud.proto.datastore.v1 import query_pb2 as _query_pb2
 from google.cloud.datastore import helpers
 from google.cloud.datastore.key import Key
diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py
index 4f49483207d7..e0fc912e0081 100644
--- a/packages/google-cloud-datastore/unit_tests/test__gax.py
+++ b/packages/google-cloud-datastore/unit_tests/test__gax.py
@@ -126,7 +126,7 @@ def mock_make_stub(*args):
         return self._get_target_class()(connection, secure)

     def test_constructor(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc
+        from google.cloud.proto.datastore.v1 import datastore_pb2_grpc

         conn = mock.Mock(
             credentials=object(),
@@ -149,7 +149,7 @@ def test_constructor(self):
         )])

     def test_constructor_insecure(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc
+        from google.cloud.proto.datastore.v1 import datastore_pb2_grpc

         conn = mock.Mock(
             credentials=object(),
diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py
index 690cc45f325e..fffc9c2bdb0b 100644
--- a/packages/google-cloud-datastore/unit_tests/test__http.py
+++ b/packages/google-cloud-datastore/unit_tests/test__http.py
@@ -139,7 +139,7 @@ def _make_key_pb(self, project, id_=1234):
         return Key(*path_args, project=project).to_protobuf()

     def _make_query_pb(self, kind):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         pb = query_pb2.Query()
         pb.kind.add().name = kind
@@ -253,7 +253,7 @@ def test_build_api_url_w_explicit_base_version(self):
             URI)

     def test_lookup_single_key_empty_response(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         key_pb = self._make_key_pb(PROJECT)
@@ -281,7 +281,7 @@ def test_lookup_single_key_empty_response(self):
         self.assertEqual(key_pb, keys[0])

     def test_lookup_single_key_empty_response_w_eventual(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         key_pb = self._make_key_pb(PROJECT)
@@ -321,7 +321,7 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
             eventual=True, transaction_id=TRANSACTION)

     def test_lookup_single_key_empty_response_w_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         TRANSACTION = b'TRANSACTION'
@@ -352,8 +352,8 @@ def test_lookup_single_key_empty_response_w_transaction(self):
         self.assertEqual(request.read_options.transaction, TRANSACTION)

     def test_lookup_single_key_nonempty_response(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         PROJECT = 'PROJECT'
         key_pb = self._make_key_pb(PROJECT)
@@ -385,7 +385,7 @@ def test_lookup_single_key_nonempty_response(self):
         self.assertEqual(key_pb, keys[0])

     def test_lookup_multiple_keys_empty_response(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         key_pb1 = self._make_key_pb(PROJECT)
@@ -415,7 +415,7 @@ def test_lookup_multiple_keys_empty_response(self):
         self.assertEqual(key_pb2, keys[1])

     def test_lookup_multiple_keys_w_missing(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         key_pb1 = self._make_key_pb(PROJECT)
@@ -450,7 +450,7 @@ def test_lookup_multiple_keys_w_missing(self):
         self.assertEqual(key_pb2, keys[1])

     def test_lookup_multiple_keys_w_deferred(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud import _http as connection_module
         from google.cloud.datastore._http import _CLIENT_INFO

@@ -494,8 +494,8 @@ def test_lookup_multiple_keys_w_deferred(self):
         self.assertEqual(key_pb2, keys[1])

     def test_run_query_w_eventual_no_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         project = 'PROJECT'
         kind = 'Nonesuch'
@@ -534,8 +534,8 @@ def test_run_query_w_eventual_no_transaction(self):
         self.assertEqual(request.read_options.transaction, b'')

     def test_run_query_wo_eventual_w_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         project = 'PROJECT'
         kind = 'Nonesuch'
@@ -577,8 +577,8 @@ def test_run_query_wo_eventual_w_transaction(self):
         self.assertEqual(request.read_options.transaction, transaction)

     def test_run_query_w_eventual_and_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         PROJECT = 'PROJECT'
         KIND = 'Nonesuch'
@@ -595,8 +595,8 @@ def test_run_query_w_eventual_and_transaction(self):
             eventual=True, transaction_id=TRANSACTION)

     def test_run_query_wo_namespace_empty_result(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         project = 'PROJECT'
         kind = 'Nonesuch'
@@ -632,9 +632,9 @@ def test_run_query_wo_namespace_empty_result(self):
         self.assertEqual(request.query, q_pb)

     def test_run_query_w_namespace_nonempty_result(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import entity_pb2
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         project = 'PROJECT'
         kind = 'Kind'
@@ -670,7 +670,7 @@ def test_run_query_w_namespace_nonempty_result(self):
         self.assertEqual(request.query, q_pb)

     def test_begin_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         TRANSACTION = b'TRANSACTION'
@@ -693,7 +693,7 @@ def test_begin_transaction(self):
         request.ParseFromString(cw['body'])

     def test_commit_wo_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         project = 'PROJECT'
@@ -729,7 +729,7 @@ def test_commit_wo_transaction(self):
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)

     def test_commit_w_transaction(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         project = 'PROJECT'
@@ -765,7 +765,7 @@ def test_commit_w_transaction(self):
         self.assertEqual(request.mode, rq_class.TRANSACTIONAL)

     def test_rollback_ok(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         TRANSACTION = b'xact'
@@ -789,7 +789,7 @@ def test_rollback_ok(self):
         self.assertEqual(request.transaction, TRANSACTION)

     def test_allocate_ids_empty(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         rsp_pb = datastore_pb2.AllocateIdsResponse()
@@ -811,7 +811,7 @@ def test_allocate_ids_empty(self):
         self.assertEqual(list(request.keys), [])

     def test_allocate_ids_non_empty(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         PROJECT = 'PROJECT'
         before_key_pbs = [
diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py
index f2c54c680bef..db62d0436c64 100644
--- a/packages/google-cloud-datastore/unit_tests/test_batch.py
+++ b/packages/google-cloud-datastore/unit_tests/test_batch.py
@@ -27,7 +27,7 @@ def _make_one(self, client):
         return self._get_target_class()(client)

     def test_ctor(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         _PROJECT = 'PROJECT'
         _NAMESPACE = 'NAMESPACE'
@@ -377,8 +377,8 @@ def _call_fut(self, commit_response_pb):
         return _parse_commit_response(commit_response_pb)

     def test_it(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         index_updates = 1337
         keys = [
@@ -414,7 +414,7 @@ class _Connection(object):
     _save_result = (False, None)

     def __init__(self, *new_key_ids):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         self._committed = []
         mutation_results = [
@@ -449,7 +449,7 @@ def is_partial(self):
         return self._id is None

     def to_protobuf(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         key = self._key = entity_pb2.Key()
         # Don't assign it, because it will just get ripped out
@@ -507,8 +507,8 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type):


 def _make_mutation(id_):
-    from google.cloud.grpc.datastore.v1 import datastore_pb2
-    from google.cloud.grpc.datastore.v1 import entity_pb2
+    from google.cloud.proto.datastore.v1 import datastore_pb2
+    from google.cloud.proto.datastore.v1 import entity_pb2

     key = entity_pb2.Key()
     key.partition_id.project_id = 'PROJECT'
diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py
index 0102abd7ca02..26a9c56dfcc6 100644
--- a/packages/google-cloud-datastore/unit_tests/test_client.py
+++ b/packages/google-cloud-datastore/unit_tests/test_client.py
@@ -24,7 +24,7 @@ def _make_credentials():


 def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
-    from google.cloud.grpc.datastore.v1 import entity_pb2
+    from google.cloud.proto.datastore.v1 import entity_pb2
     from google.cloud.datastore.helpers import _new_value_pb

     entity_pb = entity_pb2.Entity()
@@ -280,7 +280,7 @@ def test_get_multi_miss(self):
         self.assertEqual(results, [])

     def test_get_multi_miss_w_missing(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.key import Key

         KIND = 'Kind'
@@ -344,7 +344,7 @@ def test_get_multi_miss_w_deferred(self):
                          [key.to_protobuf()])

     def test_get_multi_w_deferred_from_backend_but_not_passed(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.key import Key

@@ -943,7 +943,7 @@ def lookup(self, project, key_pbs, eventual=False, transaction_id=None):
         return results, missing, deferred

     def commit(self, project, commit_request, transaction_id):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         self._commit_cw.append((project, commit_request, transaction_id))
         keys, self._commit = self._commit[0], self._commit[1:]
@@ -1015,7 +1015,7 @@ def is_partial(self):
         return self._id is None

     def to_protobuf(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         key = self._key = entity_pb2.Key()
         # Don't assign it, because it will just get ripped out
@@ -1065,7 +1065,7 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type):


 def _make_key(id_):
-    from google.cloud.grpc.datastore.v1 import entity_pb2
+    from google.cloud.proto.datastore.v1 import entity_pb2

     key = entity_pb2.Key()
     elem = key.path.add()
diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/unit_tests/test_helpers.py
index 2ea54d5afe52..18ff98e64781 100644
--- a/packages/google-cloud-datastore/unit_tests/test_helpers.py
+++ b/packages/google-cloud-datastore/unit_tests/test_helpers.py
@@ -23,7 +23,7 @@ def _call_fut(self, entity_pb, name):
         return _new_value_pb(entity_pb, name)

     def test_it(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         entity_pb = entity_pb2.Entity()
         name = 'foo'
@@ -43,7 +43,7 @@ def _call_fut(self, entity_pb):

     def test_it(self):
         import types
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         entity_pb = entity_pb2.Entity()
@@ -66,7 +66,7 @@ def _call_fut(self, val):
         return entity_from_protobuf(val)

     def test_it(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         _PROJECT = 'PROJECT'
@@ -112,7 +112,7 @@ def test_it(self):
         self.assertEqual(key.id, _ID)

     def test_mismatched_value_indexed(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         _PROJECT = 'PROJECT'
@@ -136,7 +136,7 @@ def test_mismatched_value_indexed(self):
             self._call_fut(entity_pb)

     def test_entity_no_key(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         entity_pb = entity_pb2.Entity()
         entity = self._call_fut(entity_pb)
@@ -145,7 +145,7 @@ def test_entity_no_key(self):
         self.assertEqual(dict(entity), {})

     def test_entity_with_meaning(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         entity_pb = entity_pb2.Entity()
@@ -160,7 +160,7 @@ def test_entity_with_meaning(self):
         self.assertEqual(entity._meanings, {name: (meaning, val)})

     def test_nested_entity_no_key(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb

         PROJECT = 'FOO'
@@ -218,7 +218,7 @@ def _compareEntityProto(self, entity_pb1, entity_pb2):
                 self.assertEqual(val1, val2)

     def test_empty(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity

         entity = Entity()
@@ -226,7 +226,7 @@ def test_empty(self):
         self._compareEntityProto(entity_pb, entity_pb2.Entity())

     def test_key_only(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.key import Key

@@ -245,7 +245,7 @@ def test_key_only(self):
         self._compareEntityProto(entity_pb, expected_pb)

     def test_simple_fields(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb

@@ -265,7 +265,7 @@ def test_simple_fields(self):
         self._compareEntityProto(entity_pb, expected_pb)

     def test_with_empty_list(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity

         entity = Entity()
@@ -275,7 +275,7 @@ def test_with_empty_list(self):
         self._compareEntityProto(entity_pb, entity_pb2.Entity())

     def test_inverts_to_protobuf(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
         from google.cloud.datastore.helpers import entity_from_protobuf

@@ -328,7 +328,7 @@ def test_inverts_to_protobuf(self):
         self._compareEntityProto(original_pb, new_pb)

     def test_meaning_with_change(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb

@@ -346,7 +346,7 @@ def test_meaning_with_change(self):
         self._compareEntityProto(entity_pb, expected_pb)

     def test_variable_meanings(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb

@@ -380,7 +380,7 @@ def _call_fut(self, val):
         return key_from_protobuf(val)

     def _makePB(self, project=None, namespace=None, path=()):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         pb = entity_pb2.Key()
         if project is not None:
@@ -553,7 +553,7 @@ def _call_fut(self, pb):
         return _get_value_from_value_pb(pb)

     def _makePB(self, attr_name, value):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         pb = entity_pb2.Value()
         setattr(pb, attr_name, value)
@@ -563,7 +563,7 @@ def test_datetime(self):
         import calendar
         import datetime
         from google.cloud._helpers import UTC
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         micros = 4375
         utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
@@ -573,7 +573,7 @@ def test_datetime(self):
         self.assertEqual(self._call_fut(pb), utc)

     def test_key(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.key import Key

         pb = entity_pb2.Value()
@@ -603,7 +603,7 @@ def test_unicode(self):
         self.assertEqual(self._call_fut(pb), u'str')

     def test_entity(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb

@@ -619,7 +619,7 @@ def test_entity(self):
         self.assertEqual(entity['foo'], 'Foo')

     def test_array(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         pb = entity_pb2.Value()
         array_pb = pb.array_value.values
@@ -632,7 +632,7 @@ def test_array(self):

     def test_geo_point(self):
         from google.type import latlng_pb2
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.datastore.helpers import GeoPoint

         lat = -3.14
@@ -646,14 +646,14 @@ def test_geo_point(self):

     def test_null(self):
         from google.protobuf import struct_pb2
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE)
         result = self._call_fut(pb)
         self.assertIsNone(result)

     def test_unknown(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         pb = entity_pb2.Value()
         with self.assertRaises(ValueError):
@@ -668,7 +668,7 @@ def _call_fut(self, value_pb, val):
         return _set_protobuf_value(value_pb, val)

     def _makePB(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         return entity_pb2.Value()

     def test_datetime(self):
@@ -808,14 +808,14 @@ def _call_fut(self, *args, **kwargs):
         return _get_meaning(*args, **kwargs)

     def test_no_meaning(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         value_pb = entity_pb2.Value()
         result = self._call_fut(value_pb)
         self.assertIsNone(result)

     def test_single(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         value_pb = entity_pb2.Value()
         value_pb.meaning = meaning = 22
@@ -824,7 +824,7 @@ def test_single(self):
         self.assertEqual(meaning, result)

     def test_empty_array_value(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         value_pb = entity_pb2.Value()
         value_pb.array_value.values.add()
@@ -834,7 +834,7 @@ def test_empty_array_value(self):
         self.assertEqual(None, result)

     def test_array_value(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         value_pb = entity_pb2.Value()
         meaning = 9
@@ -849,7 +849,7 @@ def test_array_value(self):
         self.assertEqual(meaning, result)

     def test_array_value_multiple_meanings(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         value_pb = entity_pb2.Value()
         meaning1 = 9
@@ -866,7 +866,7 @@ def test_array_value_multiple_meanings(self):
         self.assertEqual(result, [meaning1, meaning2])

     def test_array_value_meaning_partially_unset(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         value_pb = entity_pb2.Value()
         meaning1 = 9
diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/unit_tests/test_key.py
index c699ec773885..904338368c02 100644
--- a/packages/google-cloud-datastore/unit_tests/test_key.py
+++ b/packages/google-cloud-datastore/unit_tests/test_key.py
@@ -315,7 +315,7 @@ def test_completed_key_on_complete(self):
         self.assertRaises(ValueError, key.completed_key, 5678)

     def test_to_protobuf_defaults(self):
-        from google.cloud.grpc.datastore.v1 import entity_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2

         _KIND = 'KIND'
         key = self._make_one(_KIND, project=self._DEFAULT_PROJECT)
diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py
index a02e166aba33..fdf18a858775 100644
--- a/packages/google-cloud-datastore/unit_tests/test_query.py
+++ b/packages/google-cloud-datastore/unit_tests/test_query.py
@@ -397,7 +397,7 @@ def test_constructor_explicit(self):
         self.assertTrue(iterator._more_results)

     def test__build_protobuf_empty(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2
         from google.cloud.datastore.query import Query

         client = _Client(None, None)
@@ -409,7 +409,7 @@ def test__build_protobuf_empty(self):
         self.assertEqual(pb, expected_pb)

     def test__build_protobuf_all_values(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2
         from google.cloud.datastore.query import Query

         client = _Client(None, None)
@@ -437,7 +437,7 @@ def test__build_protobuf_all_values(self):
         self.assertEqual(pb, expected_pb)

     def test__process_query_results(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         iterator = self._make_one(None, None,
                                   end_cursor='abcd')
@@ -460,7 +460,7 @@ def test__process_query_results(self):
         self.assertTrue(iterator._more_results)

     def test__process_query_results_done(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         iterator = self._make_one(None, None,
                                   end_cursor='abcd')
@@ -491,7 +491,7 @@ def test__process_query_results_bad_enum(self):

     def test__next_page(self):
         from google.cloud.iterator import Page
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2
         from google.cloud.datastore.query import Query

         connection = _Connection()
@@ -561,7 +561,7 @@ def _call_fut(self, query):
         return _pb_from_query(query)

     def test_empty(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         pb = self._call_fut(_Query())
         self.assertEqual(list(pb.projection), [])
@@ -589,7 +589,7 @@ def test_kind(self):

     def test_ancestor(self):
         from google.cloud.datastore.key import Key
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         ancestor = Key('Ancestor', 123, project='PROJECT')
         pb = self._call_fut(_Query(ancestor=ancestor))
@@ -602,7 +602,7 @@ def test_ancestor(self):
         self.assertEqual(pfilter.value.key_value, ancestor_pb)

     def test_filter(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         query = _Query(filters=[('name', '=', u'John')])
         query.OPERATORS = {
@@ -618,7 +618,7 @@ def test_filter(self):

     def test_filter_key(self):
         from google.cloud.datastore.key import Key
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         key = Key('Kind', 123, project='PROJECT')
         query = _Query(filters=[('__key__', '=', key)])
@@ -635,7 +635,7 @@ def test_filter_key(self):
         self.assertEqual(pfilter.value.key_value, key_pb)

     def test_order(self):
-        from google.cloud.grpc.datastore.v1 import query_pb2
+        from google.cloud.proto.datastore.v1 import query_pb2

         pb = self._call_fut(_Query(order=['a', '-b', 'c']))
         self.assertEqual([item.property.name for item in pb.order],
@@ -703,7 +703,7 @@ def current_transaction(self):


 def _make_entity(kind, id_, project):
-    from google.cloud.grpc.datastore.v1 import entity_pb2
+    from google.cloud.proto.datastore.v1 import entity_pb2

     key = entity_pb2.Key()
     key.partition_id.project_id = project
@@ -715,8 +715,8 @@ def _make_entity(kind, id_, project):

 def _make_query_response(
         entity_pbs, cursor_as_bytes, more_results_enum, skipped_results):
-    from google.cloud.grpc.datastore.v1 import datastore_pb2
-    from google.cloud.grpc.datastore.v1 import query_pb2
+    from google.cloud.proto.datastore.v1 import datastore_pb2
+    from google.cloud.proto.datastore.v1 import query_pb2

     return datastore_pb2.RunQueryResponse(
         batch=query_pb2.QueryResultBatch(
diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py
index 4dec32420356..a4d278a421c3 100644
--- a/packages/google-cloud-datastore/unit_tests/test_transaction.py
+++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py
@@ -27,7 +27,7 @@ def _make_one(self, client, **kw):
         return self._get_target_class()(client, **kw)

     def test_ctor_defaults(self):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         _PROJECT = 'PROJECT'
         connection = _Connection()
@@ -179,7 +179,7 @@ class Foo(Exception):


 def _make_key(kind, id_, project):
-    from google.cloud.grpc.datastore.v1 import entity_pb2
+    from google.cloud.proto.datastore.v1 import entity_pb2

     key = entity_pb2.Key()
     key.partition_id.project_id = project
@@ -197,7 +197,7 @@ class _Connection(object):
     _side_effect = None

     def __init__(self, xact_id=123, keys=()):
-        from google.cloud.grpc.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import datastore_pb2

         self._xact_id = xact_id
         mutation_results = [

From b5b7733d1f304c1143e7bf3f9e92b4ded9b81127 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 3 Mar 2017 09:44:35 -0800
Subject: [PATCH 081/611] Making datastore Connection.begin_transaction() return low-level protobuf.
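The visible effect is that callers now unpack the transaction ID themselves,
as ``Transaction.begin()`` does below. A minimal sketch of the new contract,
using a hypothetical stand-in connection object rather than the real class:

    class FakeConnection(object):
        # Mimics the post-patch return type: a BeginTransactionResponse-like
        # object whose ``transaction`` field carries the opaque ID bytes.
        def begin_transaction(self, project):
            class Response(object):
                transaction = b'xact-id'
            return Response()

    def begin_transaction_id(connection, project):
        # Pull the ID off the low-level response, as Transaction.begin() does.
        response_pb = connection.begin_transaction(project)
        return response_pb.transaction

    assert begin_transaction_id(FakeConnection(), 'my-project') == b'xact-id'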
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 48a044ba789c..c0781a733711 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -184,8 +184,9 @@ def begin(self): """ super(Transaction, self).begin() try: - self._id = self._client._connection.begin_transaction( + response_pb = self._client._connection.begin_transaction( self.project) + self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead self._status = self._ABORTED raise diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index fffc9c2bdb0b..0103e4883479 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -672,25 +672,33 @@ def test_run_query_w_namespace_nonempty_result(self): def test_begin_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' - TRANSACTION = b'TRANSACTION' + project = 'PROJECT' + transaction = b'TRANSACTION' rsp_pb = datastore_pb2.BeginTransactionResponse() - rsp_pb.transaction = TRANSACTION + rsp_pb.transaction = transaction + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.begin_transaction(project) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':beginTransaction', + project + ':beginTransaction', ]) - self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.BeginTransactionRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.BeginTransactionRequest() request.ParseFromString(cw['body']) + # The RPC-over-HTTP request does not set the project in the request. + self.assertEqual(request.project_id, u'') def test_commit_wo_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index a4d278a421c3..db7bf6bcc97f 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -206,9 +206,12 @@ def __init__(self, xact_id=123, keys=()): mutation_results=mutation_results) def begin_transaction(self, project): + import mock + self._begun = project if self._side_effect is None: - return self._xact_id + return mock.Mock( + transaction=self._xact_id, spec=['transaction']) else: raise self._side_effect From 696eec9a26b3597a744cff08387d5dc2180ac928 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 3 Mar 2017 10:01:28 -0800 Subject: [PATCH 082/611] Making datastore Connection.rollback() return low-level protobuf. 
---
 .../google/cloud/datastore/_http.py       |  7 ++++--
 .../google/cloud/datastore/transaction.py |  1 +
 .../unit_tests/test__http.py              | 25 +++++++++++--------
 3 files changed, 21 insertions(+), 12 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index 4c384ad4d4ef..3ee30eb491df 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -448,11 +448,14 @@ def rollback(self, project, transaction_id):
         :type transaction_id: str
         :param transaction_id: The transaction ID returned from
                                :meth:`begin_transaction`.
+
+        :rtype: :class:`.datastore_pb2.RollbackResponse`
+        :returns: The returned protobuf response object.
         """
         request = _datastore_pb2.RollbackRequest()
         request.transaction = transaction_id
-        # Nothing to do with this response, so just execute the method.
-        self._datastore_api.rollback(project, request)
+        # Response is empty (i.e. no fields) but we return it anyway.
+        return self._datastore_api.rollback(project, request)

     def allocate_ids(self, project, key_pbs):
         """Obtain backend-generated IDs for a set of keys.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
index 48a044ba789c..fa07681ffa3a 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
@@ -199,6 +199,7 @@ def rollback(self):
         - Sets the current transaction's ID to None.
         """
         try:
+            # No need to use the response; it contains nothing.
            self._client._connection.rollback(self.project, self._id)
         finally:
             super(Transaction, self).rollback()
diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py
index fffc9c2bdb0b..885016749519 100644
--- a/packages/google-cloud-datastore/unit_tests/test__http.py
+++ b/packages/google-cloud-datastore/unit_tests/test__http.py
@@ -767,26 +767,31 @@ def test_commit_w_transaction(self):
     def test_rollback_ok(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2

-        PROJECT = 'PROJECT'
-        TRANSACTION = b'xact'
-
+        project = 'PROJECT'
+        transaction = b'xact'
         rsp_pb = datastore_pb2.RollbackResponse()
+
+        # Create mock HTTP and client with response.
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(_http=http, spec=['_http'])
+
+        # Make request.
         conn = self._make_one(client)
-        URI = '/'.join([
+        response = conn.rollback(project, transaction)
+
+        # Check the result and verify the callers.
+ self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':rollback', + project + ':rollback', ]) - self.assertIsNone(conn.rollback(PROJECT, TRANSACTION)) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.RollbackRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.RollbackRequest() request.ParseFromString(cw['body']) - self.assertEqual(request.transaction, TRANSACTION) + self.assertEqual(request.transaction, transaction) def test_allocate_ids_empty(self): from google.cloud.proto.datastore.v1 import datastore_pb2 From 978fa99b90d0383e7f804ca43a41172e8c944ab0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 3 Mar 2017 10:22:14 -0800 Subject: [PATCH 083/611] Making datastore Connection.allocate_ids() return low-level protobuf. --- .../google/cloud/datastore/_http.py | 8 ++- .../google/cloud/datastore/client.py | 6 +-- .../unit_tests/test__http.py | 51 ++++++++++++------- .../unit_tests/test_client.py | 15 +++--- 4 files changed, 46 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 4c384ad4d4ef..9113993c1d14 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -466,14 +466,12 @@ def allocate_ids(self, project, key_pbs): :class:`.entity_pb2.Key` :param key_pbs: The keys for which the backend should allocate IDs. - :rtype: list of :class:`.entity_pb2.Key` - :returns: An equal number of keys, with IDs filled in by the backend. + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` + :returns: The protobuf response from an allocate IDs request. """ request = _datastore_pb2.AllocateIdsRequest() _add_keys_to_request(request.keys, key_pbs) - # Nothing to do with this response, so just execute the method. 
- response = self._datastore_api.allocate_ids(project, request) - return list(response.keys) + return self._datastore_api.allocate_ids(project, request) def _set_read_options(request, eventual, transaction_id): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 87ab8f6ee0c6..aecfe603705e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -425,10 +425,10 @@ def allocate_ids(self, incomplete_key, num_ids): incomplete_key_pbs = [incomplete_key_pb] * num_ids conn = self._connection - allocated_key_pbs = conn.allocate_ids(incomplete_key.project, - incomplete_key_pbs) + response_pb = conn.allocate_ids( + incomplete_key.project, incomplete_key_pbs) allocated_ids = [allocated_key_pb.path[-1].id - for allocated_key_pb in allocated_key_pbs] + for allocated_key_pb in response_pb.keys] return [incomplete_key.completed_key(allocated_id) for allocated_id in allocated_ids] diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index fffc9c2bdb0b..e138fb67e979 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -791,55 +791,68 @@ def test_rollback_ok(self): def test_allocate_ids_empty(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' + project = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.allocate_ids(project, []) + + # Check the result and verify the callers. + self.assertEqual(list(response.keys), []) + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':allocateIds', + project + ':allocateIds', ]) - self.assertEqual(conn.allocate_ids(PROJECT, []), []) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.AllocateIdsRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.AllocateIdsRequest() request.ParseFromString(cw['body']) self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' + project = 'PROJECT' before_key_pbs = [ - self._make_key_pb(PROJECT, id_=None), - self._make_key_pb(PROJECT, id_=None), + self._make_key_pb(project, id_=None), + self._make_key_pb(project, id_=None), ] after_key_pbs = [ - self._make_key_pb(PROJECT), - self._make_key_pb(PROJECT, id_=2345), + self._make_key_pb(project), + self._make_key_pb(project, id_=2345), ] rsp_pb = datastore_pb2.AllocateIdsResponse() rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.allocate_ids(project, before_key_pbs) + + # Check the result and verify the callers. 
+ self.assertEqual(list(response.keys), after_key_pbs) + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':allocateIds', + project + ':allocateIds', ]) - self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs), - after_key_pbs) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.AllocateIdsRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.AllocateIdsRequest() request.ParseFromString(cw['body']) self.assertEqual(len(request.keys), len(before_key_pbs)) for key_before, key_after in zip(before_key_pbs, request.keys): diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 26a9c56dfcc6..b76de128d41d 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -149,7 +149,7 @@ def test_ctor_w_project_no_environ(self): # this test would fail artificially. patch = mock.patch( 'google.cloud.datastore.client._base_default_project', - new=lambda project: None) + return_value=None) with patch: self.assertRaises(EnvironmentError, self._make_one, None) @@ -679,18 +679,18 @@ def test_delete_multi_w_existing_transaction(self): self.assertEqual(len(client._connection._commit_cw), 0) def test_allocate_ids_w_partial_key(self): - NUM_IDS = 2 + num_ids = 2 - INCOMPLETE_KEY = _Key(self.PROJECT) - INCOMPLETE_KEY._id = None + incomplete_key = _Key(self.PROJECT) + incomplete_key._id = None creds = _make_credentials() client = self._make_one(credentials=creds) - result = client.allocate_ids(INCOMPLETE_KEY, NUM_IDS) + result = client.allocate_ids(incomplete_key, num_ids) # Check the IDs returned. - self.assertEqual([key._id for key in result], list(range(NUM_IDS))) + self.assertEqual([key._id for key in result], list(range(num_ids))) def test_allocate_ids_with_completed_key(self): creds = _make_credentials() @@ -954,7 +954,8 @@ def commit(self, project, commit_request, transaction_id): def allocate_ids(self, project, key_pbs): self._alloc_cw.append((project, key_pbs)) num_pbs = len(key_pbs) - return [_KeyPB(i) for i in list(range(num_pbs))] + keys = [_KeyPB(i) for i in list(range(num_pbs))] + return mock.Mock(keys=keys, spec=['keys']) class _NoCommitBatch(object): From cb6e053da3a6c360c53b6e798e363a148cb97908 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 3 Mar 2017 11:14:22 -0800 Subject: [PATCH 084/611] Making datastore Connection.lookup() return low-level protobuf. --- .../google/cloud/datastore/_http.py | 15 +- .../google/cloud/datastore/client.py | 13 +- .../unit_tests/test__http.py | 212 +++++++++++------- .../unit_tests/test_client.py | 12 +- 4 files changed, 148 insertions(+), 104 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 2650321068bf..08b8ac3d4964 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -326,23 +326,14 @@ def lookup(self, project, key_pbs, the given transaction. Incompatible with ``eventual==True``. - :rtype: tuple - :returns: A triple of (``results``, ``missing``, ``deferred``) where - both ``results`` and ``missing`` are lists of - :class:`.entity_pb2.Entity` - and ``deferred`` is a list of - :class:`.entity_pb2.Key`. 
+ :rtype: :class:`.datastore_pb2.LookupResponse` + :returns: The returned protobuf for the lookup request. """ lookup_request = _datastore_pb2.LookupRequest() _set_read_options(lookup_request, eventual, transaction_id) _add_keys_to_request(lookup_request.keys, key_pbs) - lookup_response = self._datastore_api.lookup(project, lookup_request) - - results = [result.entity for result in lookup_response.found] - missing = [result.entity for result in lookup_response.missing] - - return results, missing, list(lookup_response.deferred) + return self._datastore_api.lookup(project, lookup_request) def run_query(self, project, query_pb, namespace=None, eventual=False, transaction_id=None): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 87ab8f6ee0c6..beb7c49dcb5f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -116,28 +116,29 @@ def _extended_lookup(connection, project, key_pbs, while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 - results_found, missing_found, deferred_found = connection.lookup( + lookup_response = connection.lookup( project=project, key_pbs=key_pbs, eventual=eventual, transaction_id=transaction_id, ) - results.extend(results_found) + # Accumulate the new results. + results.extend(result.entity for result in lookup_response.found) if missing is not None: - missing.extend(missing_found) + missing.extend(result.entity for result in lookup_response.missing) if deferred is not None: - deferred.extend(deferred_found) + deferred.extend(lookup_response.deferred) break - if len(deferred_found) == 0: + if len(lookup_response.deferred) == 0: break # We have deferred keys, and the user didn't ask to know about # them, so retry (but only with the deferred ones). - key_pbs = deferred_found + key_pbs = lookup_response.deferred return results diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 0103e4883479..4d1fdc1bc357 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -255,26 +255,32 @@ def test_build_api_url_w_explicit_base_version(self): def test_lookup_single_key_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' - key_pb = self._make_key_pb(PROJECT) + project = 'PROJECT' + key_pb = self._make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb]) + + # Check the result and verify the callers. 
+ self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - found, missing, deferred = conn.lookup(PROJECT, [key_pb]) - self.assertEqual(len(found), 0) - self.assertEqual(len(missing), 0) - self.assertEqual(len(deferred), 0) + self.assertEqual(len(response.found), 0) + self.assertEqual(len(response.missing), 0) + self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) @@ -283,27 +289,32 @@ def test_lookup_single_key_empty_response(self): def test_lookup_single_key_empty_response_w_eventual(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' - key_pb = self._make_key_pb(PROJECT) + project = 'PROJECT' + key_pb = self._make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb], eventual=True) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - found, missing, deferred = conn.lookup(PROJECT, [key_pb], - eventual=True) - self.assertEqual(len(found), 0) - self.assertEqual(len(missing), 0) - self.assertEqual(len(deferred), 0) + self.assertEqual(len(response.found), 0) + self.assertEqual(len(response.missing), 0) + self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) @@ -323,62 +334,75 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): def test_lookup_single_key_empty_response_w_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' - TRANSACTION = b'TRANSACTION' - key_pb = self._make_key_pb(PROJECT) + project = 'PROJECT' + transaction = b'TRANSACTION' + key_pb = self._make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb], transaction_id=transaction) + + # Check the result and verify the callers. 
+ self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - found, missing, deferred = conn.lookup(PROJECT, [key_pb], - transaction_id=TRANSACTION) - self.assertEqual(len(found), 0) - self.assertEqual(len(missing), 0) - self.assertEqual(len(deferred), 0) + self.assertEqual(len(response.found), 0) + self.assertEqual(len(response.missing), 0) + self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) self.assertEqual(key_pb, keys[0]) - self.assertEqual(request.read_options.transaction, TRANSACTION) + self.assertEqual(request.read_options.transaction, transaction) def test_lookup_single_key_nonempty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.proto.datastore.v1 import entity_pb2 - PROJECT = 'PROJECT' - key_pb = self._make_key_pb(PROJECT) + project = 'PROJECT' + key_pb = self._make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() entity = entity_pb2.Entity() entity.key.CopyFrom(key_pb) rsp_pb.found.add(entity=entity) + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb]) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - (found,), missing, deferred = conn.lookup(PROJECT, [key_pb]) - self.assertEqual(len(missing), 0) - self.assertEqual(len(deferred), 0) + self.assertEqual(len(response.found), 1) + self.assertEqual(len(response.missing), 0) + self.assertEqual(len(response.deferred), 0) + found = response.found[0].entity self.assertEqual(found.key.path[0].kind, 'Kind') self.assertEqual(found.key.path[0].id, 1234) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) @@ -387,27 +411,33 @@ def test_lookup_single_key_nonempty_response(self): def test_lookup_multiple_keys_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' - key_pb1 = self._make_key_pb(PROJECT) - key_pb2 = self._make_key_pb(PROJECT, id_=2345) + project = 'PROJECT' + key_pb1 = self._make_key_pb(project) + key_pb2 = self._make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb1, key_pb2]) + + # Check the result and verify the callers. 
+ self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) - self.assertEqual(len(found), 0) - self.assertEqual(len(missing), 0) - self.assertEqual(len(deferred), 0) + self.assertEqual(len(response.found), 0) + self.assertEqual(len(response.missing), 0) + self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 2) @@ -417,32 +447,38 @@ def test_lookup_multiple_keys_empty_response(self): def test_lookup_multiple_keys_w_missing(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - PROJECT = 'PROJECT' - key_pb1 = self._make_key_pb(PROJECT) - key_pb2 = self._make_key_pb(PROJECT, id_=2345) + project = 'PROJECT' + key_pb1 = self._make_key_pb(project) + key_pb2 = self._make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() er_1 = rsp_pb.missing.add() er_1.entity.key.CopyFrom(key_pb1) er_2 = rsp_pb.missing.add() er_2.entity.key.CopyFrom(key_pb2) + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb1, key_pb2]) + + # Check the result and verify the callers. + self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) - self.assertEqual(result, []) - self.assertEqual(len(deferred), 0) - self.assertEqual([missed.key for missed in missing], - [key_pb1, key_pb2]) + self.assertEqual(len(response.found), 0) + self.assertEqual(len(response.deferred), 0) + missing_keys = [result.entity.key for result in response.missing] + self.assertEqual(missing_keys, [key_pb1, key_pb2]) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + self._verify_protobuf_call(cw, uri, conn) + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 2) @@ -455,28 +491,35 @@ def test_lookup_multiple_keys_w_deferred(self): from google.cloud import _http as connection_module from google.cloud.datastore._http import _CLIENT_INFO - PROJECT = 'PROJECT' - key_pb1 = self._make_key_pb(PROJECT) - key_pb2 = self._make_key_pb(PROJECT, id_=2345) + project = 'PROJECT' + key_pb1 = self._make_key_pb(project) + key_pb2 = self._make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) + + # Make request. conn = self._make_one(client) - URI = '/'.join([ + response = conn.lookup(project, [key_pb1, key_pb2]) + + # Check the result and verify the callers. 
+ self.assertEqual(response, rsp_pb) + uri = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', - PROJECT + ':lookup', + project + ':lookup', ]) - result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) - self.assertEqual(result, []) - self.assertEqual(len(missing), 0) - self.assertEqual([def_key for def_key in deferred], [key_pb1, key_pb2]) + self.assertEqual(len(response.found), 0) + self.assertEqual(len(response.missing), 0) + self.assertEqual(list(response.deferred), [key_pb1, key_pb2]) cw = http._called_with - self._verify_protobuf_call(cw, URI, conn) - self.assertEqual(cw['uri'], URI) + self._verify_protobuf_call(cw, uri, conn) + self.assertEqual(cw['uri'], uri) self.assertEqual(cw['method'], 'POST') expected_headers = { 'Content-Type': 'application/x-protobuf', @@ -485,8 +528,7 @@ def test_lookup_multiple_keys_w_deferred(self): connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } self.assertEqual(cw['headers'], expected_headers) - rq_class = datastore_pb2.LookupRequest - request = rq_class() + request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 2) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 26a9c56dfcc6..9ffe2dfd4996 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -940,7 +940,17 @@ def lookup(self, project, key_pbs, eventual=False, transaction_id=None): self._lookup_cw.append((project, key_pbs, eventual, transaction_id)) triple, self._lookup = self._lookup[0], self._lookup[1:] results, missing, deferred = triple - return results, missing, deferred + + entity_results_found = [ + mock.Mock(entity=result, spec=['entity']) for result in results] + entity_results_missing = [ + mock.Mock(entity=missing_entity, spec=['entity']) + for missing_entity in missing] + return mock.Mock( + found=entity_results_found, + missing=entity_results_missing, + deferred=deferred, + spec=['found', 'missing', 'deferred']) def commit(self, project, commit_request, transaction_id): from google.cloud.proto.datastore.v1 import datastore_pb2 From f69979208de9edb4370e9cd2b39a11cb18a294ab Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sat, 4 Mar 2017 10:31:21 -0800 Subject: [PATCH 085/611] Sending x-goog-api-client header for Datastore gRPC implementation. 
(#3096) --- .../google/cloud/datastore/_gax.py | 19 ++++-- packages/google-cloud-datastore/setup.py | 2 +- .../unit_tests/test__gax.py | 60 ++++++++----------- 3 files changed, 41 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index b29ce874f298..58500301c950 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -17,15 +17,24 @@ import contextlib +from google.cloud.proto.datastore.v1 import datastore_pb2_grpc +from google.gax.utils import metrics from grpc import StatusCode from google.cloud._helpers import make_insecure_stub from google.cloud._helpers import make_secure_stub from google.cloud import exceptions -from google.cloud.proto.datastore.v1 import datastore_pb2_grpc +from google.cloud.datastore import __version__ +_METRICS_HEADERS = ( + ('gccl', __version__), +) +_HEADER_STR = metrics.stringify(metrics.fill(_METRICS_HEADERS)) +_GRPC_EXTRA_OPTIONS = ( + ('x-goog-api-client', _HEADER_STR), +) _GRPC_ERROR_MAPPING = { StatusCode.UNKNOWN: exceptions.InternalServerError, StatusCode.INVALID_ARGUMENT: exceptions.BadRequest, @@ -85,10 +94,10 @@ class _DatastoreAPIOverGRPC(object): def __init__(self, connection, secure): if secure: - self._stub = make_secure_stub(connection.credentials, - connection.USER_AGENT, - datastore_pb2_grpc.DatastoreStub, - connection.host) + self._stub = make_secure_stub( + connection.credentials, connection.USER_AGENT, + datastore_pb2_grpc.DatastoreStub, connection.host, + extra_options=_GRPC_EXTRA_OPTIONS) else: self._stub = make_insecure_stub(datastore_pb2_grpc.DatastoreStub, connection.host) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 170692218b4f..625415c65ddb 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.23.1, < 0.24dev', - 'grpcio >= 1.0.2, < 2.0dev', + 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index e0fc912e0081..419c85cc9fdc 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -97,7 +97,7 @@ def _get_target_class(): return _DatastoreAPIOverGRPC - def _make_one(self, stub, connection=None, secure=True, mock_args=None): + def _make_one(self, stub, connection=None, secure=True): if connection is None: connection = mock.Mock( credentials=object(), @@ -106,27 +106,21 @@ def _make_one(self, stub, connection=None, secure=True, mock_args=None): spec=['credentials', 'host', 'USER_AGENT'], ) - if mock_args is None: - mock_args = [] - - def mock_make_stub(*args): - mock_args.append(args) - return stub - if secure: patch = mock.patch( 'google.cloud.datastore._gax.make_secure_stub', - new=mock_make_stub) + return_value=stub) else: patch = mock.patch( 'google.cloud.datastore._gax.make_insecure_stub', - new=mock_make_stub) + return_value=stub) - with patch: - return self._get_target_class()(connection, secure) + with patch as make_stub_mock: + api_obj = self._get_target_class()(connection, secure) + return api_obj, make_stub_mock def test_constructor(self): - from google.cloud.proto.datastore.v1 import datastore_pb2_grpc + import 
google.cloud.datastore._gax as MUT conn = mock.Mock( credentials=object(), @@ -136,17 +130,17 @@ def test_constructor(self): ) stub = _GRPCStub() - mock_args = [] - datastore_api = self._make_one(stub, connection=conn, - mock_args=mock_args) - self.assertIs(datastore_api._stub, stub) + datastore_api, make_stub_mock = self._make_one( + stub, connection=conn) - self.assertEqual(mock_args, [( + self.assertIs(datastore_api._stub, stub) + make_stub_mock.assert_called_once_with( conn.credentials, conn.USER_AGENT, - datastore_pb2_grpc.DatastoreStub, + MUT.datastore_pb2_grpc.DatastoreStub, conn.host, - )]) + extra_options=MUT._GRPC_EXTRA_OPTIONS, + ) def test_constructor_insecure(self): from google.cloud.proto.datastore.v1 import datastore_pb2_grpc @@ -158,21 +152,19 @@ def test_constructor_insecure(self): ) stub = _GRPCStub() - mock_args = [] - datastore_api = self._make_one(stub, connection=conn, - secure=False, - mock_args=mock_args) - self.assertIs(datastore_api._stub, stub) + datastore_api, make_stub_mock = self._make_one( + stub, connection=conn, secure=False) - self.assertEqual(mock_args, [( + self.assertIs(datastore_api._stub, stub) + make_stub_mock.assert_called_once_with( datastore_pb2_grpc.DatastoreStub, conn.host, - )]) + ) def test_lookup(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' @@ -185,7 +177,7 @@ def test_lookup(self): def test_run_query(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' @@ -197,7 +189,7 @@ def test_run_query(self): def _run_query_failure_helper(self, exc, err_class): stub = _GRPCStub(side_effect=exc) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' @@ -225,7 +217,7 @@ def test_run_query_invalid_argument(self): def test_begin_transaction(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' @@ -239,7 +231,7 @@ def test_begin_transaction(self): def test_commit_success(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' @@ -252,7 +244,7 @@ def test_commit_success(self): def test_rollback(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' @@ -265,7 +257,7 @@ def test_rollback(self): def test_allocate_ids(self): return_val = object() stub = _GRPCStub(return_val) - datastore_api = self._make_one(stub=stub) + datastore_api, _ = self._make_one(stub=stub) request_pb = mock.Mock(project_id=None, spec=['project_id']) project = 'PROJECT' From 2024035655319e20789f1ceaafb6ab6f739e6c72 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 23 Feb 2017 14:25:45 -0800 Subject: [PATCH 086/611] Add basic helpers needed for GAPIC client in datastore. 
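A rough usage sketch under these changes (the values below are placeholders, and a real ``Client`` still needs resolvable credentials):

    from google.cloud import datastore

    # Explicitly select the HTTP transport instead of gRPC/GAX.
    client = datastore.Client(project='my-project', use_gax=False)

    # First access lazily constructs HTTPDatastoreAPI (or the GAPIC
    # DatastoreClient when gRPC is enabled) and caches the instance.
    api = client._datastore_api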
Includes - ``use_gax`` argument in Client constructor - ``make_datastore_api`` helper to make a gRPC channel with the correct credentials - A basic ``HTTPDatastoreAPI`` class to act as the equivalent to the GAPIC ``DatastoreClient`` - A lazy property on ``Client`` that will hold the API object. --- .../google/cloud/datastore/_gax.py | 19 +++++ .../google/cloud/datastore/_http.py | 13 ++++ .../google/cloud/datastore/client.py | 36 ++++++++- .../unit_tests/test__gax.py | 31 ++++++++ .../unit_tests/test__http.py | 17 +++++ .../unit_tests/test_client.py | 74 +++++++++++++++++-- 6 files changed, 183 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index 58500301c950..1bf7b21fbe5f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -17,12 +17,15 @@ import contextlib +from google.cloud.gapic.datastore.v1 import datastore_client from google.cloud.proto.datastore.v1 import datastore_pb2_grpc from google.gax.utils import metrics from grpc import StatusCode from google.cloud._helpers import make_insecure_stub +from google.cloud._helpers import make_secure_channel from google.cloud._helpers import make_secure_stub +from google.cloud._http import DEFAULT_USER_AGENT from google.cloud import exceptions from google.cloud.datastore import __version__ @@ -204,3 +207,19 @@ def allocate_ids(self, project, request_pb): request_pb.project_id = project with _grpc_catch_rendezvous(): return self._stub.AllocateIds(request_pb) + + +def make_datastore_api(client): + """Create an instance of the GAPIC Datastore API. + + :type client: :class:`~google.cloud.datastore.client.Client` + :param client: The client that holds configuration details. + + :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` + :returns: A datastore API instance with the proper credentials. + """ + channel = make_secure_channel( + client._credentials, DEFAULT_USER_AGENT, + datastore_client.DatastoreClient.SERVICE_ADDRESS) + return datastore_client.DatastoreClient( + channel=channel, lib_name='gccl', lib_version=__version__) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index a5231431820d..3e82ffbb6e16 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -467,6 +467,19 @@ def allocate_ids(self, project, key_pbs): return self._datastore_api.allocate_ids(project, request) +class HTTPDatastoreAPI(object): + """An API object that sends proto-over-HTTP requests. + + Intended to provide the same methods as the GAPIC ``DatastoreClient``. + + :type client: :class:`~google.cloud.datastore.client.Client` + :param client: The client that provides configuration. + """ + + def __init__(self, client): + self.client = client + + def _set_read_options(request, eventual, transaction_id): """Validate rules for read options, and assign to the request. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 438996ceff36..ce924edda183 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -19,19 +19,30 @@ from google.cloud._helpers import ( _determine_default_project as _base_default_project) from google.cloud.client import ClientWithProject +from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.environment_vars import GCD_DATASET + from google.cloud.datastore._http import Connection +from google.cloud.datastore._http import HTTPDatastoreAPI from google.cloud.datastore import helpers from google.cloud.datastore.batch import Batch from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query from google.cloud.datastore.transaction import Transaction -from google.cloud.environment_vars import GCD_DATASET +try: + from google.cloud.datastore._gax import make_datastore_api + _HAVE_GRPC = True +except ImportError: # pragma: NO COVER + make_datastore_api = None + _HAVE_GRPC = False _MAX_LOOPS = 128 """Maximum number of iterations to wait for deferred keys.""" +_USE_GAX = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) + def _get_gcd_project(): """Gets the GCD application ID if it can be inferred.""" @@ -169,24 +180,45 @@ class Client(ClientWithProject): :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. + + :type use_gax: bool + :param use_gax: (Optional) Explicitly specifies whether + to use the gRPC transport (via GAX) or HTTP. If unset, + falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment + variable. """ SCOPE = ('https://www.googleapis.com/auth/datastore',) """The scopes required for authenticating as a Cloud Datastore consumer.""" def __init__(self, project=None, namespace=None, - credentials=None, http=None): + credentials=None, http=None, use_gax=None): super(Client, self).__init__( project=project, credentials=credentials, http=http) self._connection = Connection(self) self.namespace = namespace self._batch_stack = _LocalStack() + self._datastore_api_internal = None + if use_gax is None: + self._use_gax = _USE_GAX + else: + self._use_gax = use_gax @staticmethod def _determine_default(project): """Helper: override default project detection.""" return _determine_default_project(project) + @property + def _datastore_api(self): + """Getter for a wrapped API object.""" + if self._datastore_api_internal is None: + if self._use_gax: + self._datastore_api_internal = make_datastore_api(self) + else: + self._datastore_api_internal = HTTPDatastoreAPI(self) + return self._datastore_api_internal + def _push_batch(self, batch): """Push a batch/transaction onto our stack. 
diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index 419c85cc9fdc..aea60801cfe5 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -269,6 +269,37 @@ def test_allocate_ids(self): [(request_pb, 'AllocateIds')]) +@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') +class Test_make_datastore_api(unittest.TestCase): + + def _call_fut(self, client): + from google.cloud.datastore._gax import make_datastore_api + + return make_datastore_api(client) + + @mock.patch( + 'google.cloud.gapic.datastore.v1.datastore_client.DatastoreClient', + SERVICE_ADDRESS='datastore.mock.mock', + return_value=mock.sentinel.ds_client) + @mock.patch('google.cloud.datastore._gax.make_secure_channel', + return_value=mock.sentinel.channel) + def test_it(self, make_chan, mock_klass): + from google.cloud._http import DEFAULT_USER_AGENT + from google.cloud.datastore import __version__ + + client = mock.Mock( + _credentials=mock.sentinel.credentials, spec=['_credentials']) + ds_api = self._call_fut(client) + self.assertIs(ds_api, mock.sentinel.ds_client) + + make_chan.assert_called_once_with( + mock.sentinel.credentials, DEFAULT_USER_AGENT, + mock_klass.SERVICE_ADDRESS) + mock_klass.assert_called_once_with( + channel=mock.sentinel.channel, lib_name='gccl', + lib_version=__version__) + + class _GRPCStub(object): def __init__(self, return_val=None, side_effect=Exception): diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 23de36cdc2b1..a924fcf40316 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -914,6 +914,23 @@ def test_allocate_ids_non_empty(self): self.assertEqual(key_before, key_after) +class TestHTTPDatastoreAPI(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.datastore._http import HTTPDatastoreAPI + + return HTTPDatastoreAPI + + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) + + def test_constructor(self): + client = object() + ds_api = self._make_one(client) + self.assertIs(ds_api.client, client) + + class Http(object): _called_with = None diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 41cf19013a14..63c2e0adebeb 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -138,13 +138,14 @@ def _get_target_class(): return Client def _make_one(self, project=PROJECT, namespace=None, - credentials=None, http=None): + credentials=None, http=None, use_gax=None): return self._get_target_class()(project=project, namespace=namespace, credentials=credentials, - http=http) + http=http, + use_gax=use_gax) - def test_ctor_w_project_no_environ(self): + def test_constructor_w_project_no_environ(self): # Some environments (e.g. AppVeyor CI) run in GCE, so # this test would fail artificially. 
patch = mock.patch( @@ -153,7 +154,7 @@ def test_ctor_w_project_no_environ(self): with patch: self.assertRaises(EnvironmentError, self._make_one, None) - def test_ctor_w_implicit_inputs(self): + def test_constructor_w_implicit_inputs(self): OTHER = 'other' creds = _make_credentials() default_called = [] @@ -183,7 +184,7 @@ def fallback_mock(project): self.assertIsNone(client.current_transaction) self.assertEqual(default_called, [None]) - def test_ctor_w_explicit_inputs(self): + def test_constructor_w_explicit_inputs(self): OTHER = 'other' NAMESPACE = 'namespace' creds = _make_credentials() @@ -200,6 +201,69 @@ def test_ctor_w_explicit_inputs(self): self.assertIsNone(client.current_batch) self.assertEqual(list(client._batch_stack), []) + def test_constructor_use_gax_default(self): + import google.cloud.datastore.client as MUT + + project = 'PROJECT' + creds = _make_credentials() + http = object() + + with mock.patch.object(MUT, '_USE_GAX', new=True): + client1 = self._make_one( + project=project, credentials=creds, http=http) + self.assertTrue(client1._use_gax) + # Explicitly over-ride the environment. + client2 = self._make_one( + project=project, credentials=creds, http=http, + use_gax=False) + self.assertFalse(client2._use_gax) + + with mock.patch.object(MUT, '_USE_GAX', new=False): + client3 = self._make_one( + project=project, credentials=creds, http=http) + self.assertFalse(client3._use_gax) + # Explicitly over-ride the environment. + client4 = self._make_one( + project=project, credentials=creds, http=http, + use_gax=True) + self.assertTrue(client4._use_gax) + + def test__datastore_api_property_gax(self): + client = self._make_one( + project='prahj-ekt', credentials=_make_credentials(), + http=object(), use_gax=True) + + self.assertIsNone(client._datastore_api_internal) + patch = mock.patch( + 'google.cloud.datastore.client.make_datastore_api', + return_value=mock.sentinel.ds_api) + with patch as make_api: + ds_api = client._datastore_api + self.assertIs(ds_api, mock.sentinel.ds_api) + make_api.assert_called_once_with(client) + self.assertIs( + client._datastore_api_internal, mock.sentinel.ds_api) + # Make sure the cached value is used. + self.assertEqual(make_api.call_count, 1) + self.assertIs( + client._datastore_api, mock.sentinel.ds_api) + self.assertEqual(make_api.call_count, 1) + + def test__datastore_api_property_http(self): + from google.cloud.datastore._http import HTTPDatastoreAPI + + client = self._make_one( + project='prahj-ekt', credentials=_make_credentials(), + http=object(), use_gax=False) + + self.assertIsNone(client._datastore_api_internal) + ds_api = client._datastore_api + self.assertIsInstance(ds_api, HTTPDatastoreAPI) + self.assertIs(ds_api.client, client) + # Make sure the cached value is used. + self.assertIs(client._datastore_api_internal, ds_api) + self.assertIs(client._datastore_api, ds_api) + def test__push_batch_and__pop_batch(self): creds = _make_credentials() client = self._make_one(credentials=creds) From b040cf189ea32334704021cd9fb2b28bd3e40c8d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 6 Mar 2017 11:29:40 -0800 Subject: [PATCH 087/611] Moving _request and _rpc off the datastore HTTP helper. This is to prepare for a removal of the Connection class (relates to #2746 and #3105). 
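With ``_request`` and ``_rpc`` at module level, the internal call shape becomes roughly the following (``conn`` stands in for an existing ``Connection`` instance; the project value is illustrative):

    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.datastore import _http

    request_pb = datastore_pb2.BeginTransactionRequest(
        project_id='my-project')
    # _rpc() serializes the request, POSTs it via _request(), and
    # parses the response bytes into the given protobuf class.
    response_pb = _http._rpc(
        conn, 'my-project', 'beginTransaction', request_pb,
        datastore_pb2.BeginTransactionResponse)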
--- .../google/cloud/datastore/_http.py | 154 ++++++++------- .../unit_tests/test__http.py | 176 +++++++++++------- 2 files changed, 187 insertions(+), 143 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index a5231431820d..ab9ff5aeb7dc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -41,83 +41,93 @@ _CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(__version__) -class _DatastoreAPIOverHttp(object): - """Helper mapping datastore API methods. +def _request(connection, project, method, data): + """Make a request over the Http transport to the Cloud Datastore API. - Makes requests to send / receive protobuf content over HTTP/1.1. + :type connection: :class:`Connection` + :param connection: A connection object that contains helpful + information for making requests. - Methods make bare API requests without any helpers for constructing - the requests or parsing the responses. + :type project: str + :param project: The project to make the request for. + + :type method: str + :param method: The API call method name (ie, ``runQuery``, + ``lookup``, etc) + + :type data: str + :param data: The data to send with the API call. + Typically this is a serialized Protobuf string. + + :rtype: str + :returns: The string response content from the API call. + :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the + response code is not 200 OK. + """ + headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': str(len(data)), + 'User-Agent': connection.USER_AGENT, + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + headers, content = connection.http.request( + uri=connection.build_api_url(project=project, method=method), + method='POST', headers=headers, body=data) + + status = headers['status'] + if status != '200': + error_status = status_pb2.Status.FromString(content) + raise exceptions.make_exception( + headers, error_status.message, use_json=False) + + return content + + +def _rpc(connection, project, method, request_pb, response_pb_cls): + """Make a protobuf RPC request. :type connection: :class:`Connection` :param connection: A connection object that contains helpful information for making requests. - """ - def __init__(self, connection): - self.connection = connection + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. - def _request(self, project, method, data): - """Make a request over the Http transport to the Cloud Datastore API. + :type method: str + :param method: The name of the method to invoke. - :type project: str - :param project: The project to make the request for. + :type request_pb: :class:`google.protobuf.message.Message` instance + :param request_pb: the protobuf instance representing the request. - :type method: str - :param method: The API call method name (ie, ``runQuery``, - ``lookup``, etc) + :type response_pb_cls: A :class:`google.protobuf.message.Message` + subclass. + :param response_pb_cls: The class used to unmarshall the response + protobuf. - :type data: str - :param data: The data to send with the API call. - Typically this is a serialized Protobuf string. + :rtype: :class:`google.protobuf.message.Message` + :returns: The RPC message parsed from the response. 
+ """ + response = _request(connection=connection, project=project, + method=method, data=request_pb.SerializeToString()) + return response_pb_cls.FromString(response) - :rtype: str - :returns: The string response content from the API call. - :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the - response code is not 200 OK. - """ - headers = { - 'Content-Type': 'application/x-protobuf', - 'Content-Length': str(len(data)), - 'User-Agent': self.connection.USER_AGENT, - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - headers, content = self.connection.http.request( - uri=self.connection.build_api_url(project=project, method=method), - method='POST', headers=headers, body=data) - - status = headers['status'] - if status != '200': - error_status = status_pb2.Status.FromString(content) - raise exceptions.make_exception( - headers, error_status.message, use_json=False) - - return content - - def _rpc(self, project, method, request_pb, response_pb_cls): - """Make a protobuf RPC request. - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. +class _DatastoreAPIOverHttp(object): + """Helper mapping datastore API methods. - :type method: str - :param method: The name of the method to invoke. + Makes requests to send / receive protobuf content over HTTP/1.1. - :type request_pb: :class:`google.protobuf.message.Message` instance - :param request_pb: the protobuf instance representing the request. + Methods make bare API requests without any helpers for constructing + the requests or parsing the responses. - :type response_pb_cls: A :class:`google.protobuf.message.Message` - subclass. - :param response_pb_cls: The class used to unmarshall the response - protobuf. + :type connection: :class:`Connection` + :param connection: A connection object that contains helpful + information for making requests. + """ - :rtype: :class:`google.protobuf.message.Message` - :returns: The RPC message parsed from the response. - """ - response = self._request(project=project, method=method, - data=request_pb.SerializeToString()) - return response_pb_cls.FromString(response) + def __init__(self, connection): + self.connection = connection def lookup(self, project, request_pb): """Perform a ``lookup`` request. @@ -132,8 +142,8 @@ def lookup(self, project, request_pb): :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ - return self._rpc(project, 'lookup', request_pb, - _datastore_pb2.LookupResponse) + return _rpc(self.connection, project, 'lookup', request_pb, + _datastore_pb2.LookupResponse) def run_query(self, project, request_pb): """Perform a ``runQuery`` request. @@ -148,8 +158,8 @@ def run_query(self, project, request_pb): :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ - return self._rpc(project, 'runQuery', request_pb, - _datastore_pb2.RunQueryResponse) + return _rpc(self.connection, project, 'runQuery', request_pb, + _datastore_pb2.RunQueryResponse) def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request. @@ -165,8 +175,8 @@ def begin_transaction(self, project, request_pb): :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. 
""" - return self._rpc(project, 'beginTransaction', request_pb, - _datastore_pb2.BeginTransactionResponse) + return _rpc(self.connection, project, 'beginTransaction', request_pb, + _datastore_pb2.BeginTransactionResponse) def commit(self, project, request_pb): """Perform a ``commit`` request. @@ -181,8 +191,8 @@ def commit(self, project, request_pb): :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ - return self._rpc(project, 'commit', request_pb, - _datastore_pb2.CommitResponse) + return _rpc(self.connection, project, 'commit', request_pb, + _datastore_pb2.CommitResponse) def rollback(self, project, request_pb): """Perform a ``rollback`` request. @@ -197,8 +207,8 @@ def rollback(self, project, request_pb): :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ - return self._rpc(project, 'rollback', request_pb, - _datastore_pb2.RollbackResponse) + return _rpc(self.connection, project, 'rollback', request_pb, + _datastore_pb2.RollbackResponse) def allocate_ids(self, project, request_pb): """Perform an ``allocateIds`` request. @@ -213,8 +223,8 @@ def allocate_ids(self, project, request_pb): :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ - return self._rpc(project, 'allocateIds', request_pb, - _datastore_pb2.AllocateIdsResponse) + return _rpc(self.connection, project, 'allocateIds', request_pb, + _datastore_pb2.AllocateIdsResponse) class Connection(connection_module.Connection): diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 23de36cdc2b1..de304c8e69ba 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -17,76 +17,36 @@ import mock -class Test_DatastoreAPIOverHttp(unittest.TestCase): +class Test__request(unittest.TestCase): @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import _DatastoreAPIOverHttp + def _call_fut(*args, **kwargs): + from google.cloud.datastore._http import _request - return _DatastoreAPIOverHttp + return _request(*args, **kwargs) - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test__rpc(self): + def test_success(self): from google.cloud import _http as connection_module from google.cloud.datastore._http import _CLIENT_INFO - class ReqPB(object): - - def SerializeToString(self): - return REQPB - - class RspPB(object): - - def __init__(self, pb): - self._pb = pb - - @classmethod - def FromString(cls, pb): - return cls(pb) + project = 'PROJECT' + method = 'METHOD' + data = b'DATA' + uri = 'http://api-url' - REQPB = b'REQPB' - PROJECT = 'PROJECT' - METHOD = 'METHOD' - URI = 'http://api-url' - conn = _Connection(URI) - datastore_api = self._make_one(conn) - http = conn.http = Http({'status': '200'}, 'CONTENT') - response = datastore_api._rpc(PROJECT, METHOD, ReqPB(), RspPB) - self.assertIsInstance(response, RspPB) - self.assertEqual(response._pb, 'CONTENT') - called_with = http._called_with - self.assertEqual(len(called_with), 4) - self.assertEqual(called_with['uri'], URI) - self.assertEqual(called_with['method'], 'POST') - expected_headers = { - 'Content-Type': 'application/x-protobuf', - 'User-Agent': conn.USER_AGENT, - 'Content-Length': '5', - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - self.assertEqual(called_with['headers'], expected_headers) - 
self.assertEqual(called_with['body'], REQPB) - self.assertEqual(conn.build_kwargs, - [{'method': METHOD, 'project': PROJECT}]) + # Make mock Connection object with canned response. + conn = _Connection(uri) + response_data = 'CONTENT' + http = conn.http = Http({'status': '200'}, response_data) - def test__request_w_200(self): - from google.cloud import _http as connection_module - from google.cloud.datastore._http import _CLIENT_INFO + # Call actual function under test. + response = self._call_fut(conn, project, method, data) + self.assertEqual(response, response_data) - PROJECT = 'PROJECT' - METHOD = 'METHOD' - DATA = b'DATA' - URI = 'http://api-url' - conn = _Connection(URI) - datastore_api = self._make_one(conn) - http = conn.http = Http({'status': '200'}, 'CONTENT') - self.assertEqual(datastore_api._request(PROJECT, METHOD, DATA), - 'CONTENT') + # Check that the mocks were called as expected. called_with = http._called_with self.assertEqual(len(called_with), 4) - self.assertEqual(called_with['uri'], URI) + self.assertEqual(called_with['uri'], uri) self.assertEqual(called_with['method'], 'POST') expected_headers = { 'Content-Type': 'application/x-protobuf', @@ -95,31 +55,105 @@ def test__request_w_200(self): connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } self.assertEqual(called_with['headers'], expected_headers) - self.assertEqual(called_with['body'], DATA) + self.assertEqual(called_with['body'], data) self.assertEqual(conn.build_kwargs, - [{'method': METHOD, 'project': PROJECT}]) + [{'method': method, 'project': project}]) - def test__request_not_200(self): + def test_failure(self): from google.cloud.exceptions import BadRequest + from google.rpc import code_pb2 from google.rpc import status_pb2 + project = 'PROJECT' + method = 'METHOD' + data = 'DATA' + uri = 'http://api-url' + + # Make mock Connection object with canned response. + conn = _Connection(uri) error = status_pb2.Status() error.message = 'Entity value is indexed.' - error.code = 9 # FAILED_PRECONDITION - - PROJECT = 'PROJECT' - METHOD = 'METHOD' - DATA = 'DATA' - URI = 'http://api-url' - conn = _Connection(URI) - datastore_api = self._make_one(conn) + error.code = code_pb2.FAILED_PRECONDITION conn.http = Http({'status': '400'}, error.SerializeToString()) + + # Call actual function under test. with self.assertRaises(BadRequest) as exc: - datastore_api._request(PROJECT, METHOD, DATA) + self._call_fut(conn, project, method, data) + + # Check that the mocks were called as expected. expected_message = '400 Entity value is indexed.' 
self.assertEqual(str(exc.exception), expected_message) self.assertEqual(conn.build_kwargs, - [{'method': METHOD, 'project': PROJECT}]) + [{'method': method, 'project': project}]) + + +class Test__rpc(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.datastore._http import _rpc + + return _rpc(*args, **kwargs) + + def test_it(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 + + connection = object() + project = 'projectOK' + method = 'beginTransaction' + request_pb = datastore_pb2.BeginTransactionRequest( + project_id=project) + + response_pb = datastore_pb2.BeginTransactionResponse( + transaction=b'7830rmc') + patch = mock.patch('google.cloud.datastore._http._request', + return_value=response_pb.SerializeToString()) + with patch as mock_request: + result = self._call_fut( + connection, project, method, request_pb, + datastore_pb2.BeginTransactionResponse) + self.assertEqual(result, response_pb) + + mock_request.assert_called_once_with( + connection=connection, data=request_pb.SerializeToString(), + method=method, project=project) + + +class Test_DatastoreAPIOverHttp(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.datastore._http import _DatastoreAPIOverHttp + + return _DatastoreAPIOverHttp + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_constructor(self): + connection = object() + ds_api = self._make_one(connection) + self.assertIs(ds_api.connection, connection) + + def test_lookup(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 + + connection = object() + ds_api = self._make_one(connection) + + project = 'project' + request_pb = object() + + patch = mock.patch( + 'google.cloud.datastore._http._rpc', + return_value=mock.sentinel.looked_up) + with patch as mock_rpc: + result = ds_api.lookup(project, request_pb) + self.assertIs(result, mock.sentinel.looked_up) + + mock_rpc.assert_called_once_with( + connection, project, 'lookup', request_pb, + datastore_pb2.LookupResponse) class TestConnection(unittest.TestCase): From cc4a4f3f692c634216697e0cbb3e2c4b6b84b154 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 6 Mar 2017 11:58:36 -0800 Subject: [PATCH 088/611] Moving build_api_url() off of datastore Connection. Also sending DEFAULT_USER_AGENT header instead of relying on the Connection.USER_AGENT property. 
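The URL builder becomes a module-level function taking an explicit base URL rather than a ``Connection`` method. A sketch of the new shape (illustrative values):

    from google.cloud.datastore import _http

    url = _http.build_api_url(
        'my-project', 'runQuery', _http.API_BASE_URL)
    # -> 'https://datastore.googleapis.com/v1/projects/my-project:runQuery'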
--- .../google/cloud/datastore/_http.py | 80 ++++---- .../unit_tests/test__http.py | 182 ++++-------------- 2 files changed, 73 insertions(+), 189 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index e52885d343b9..3a1b4adb0e65 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -35,6 +35,13 @@ DATASTORE_API_HOST = 'datastore.googleapis.com' """Datastore API request host.""" +API_BASE_URL = 'https://' + DATASTORE_API_HOST +"""The base of the API call URL.""" +API_VERSION = 'v1' +"""The version of the API, used in building the API call's URL.""" +API_URL_TEMPLATE = ('{api_base}/{api_version}/projects' + '/{project}:{method}') +"""A template for the URL of a particular API call.""" _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC @@ -67,12 +74,12 @@ def _request(connection, project, method, data): headers = { 'Content-Type': 'application/x-protobuf', 'Content-Length': str(len(data)), - 'User-Agent': connection.USER_AGENT, + 'User-Agent': connection_module.DEFAULT_USER_AGENT, connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } + api_url = build_api_url(project, method, connection.api_base_url) headers, content = connection.http.request( - uri=connection.build_api_url(project=project, method=method), - method='POST', headers=headers, body=data) + uri=api_url, method='POST', headers=headers, body=data) status = headers['status'] if status != '200': @@ -113,6 +120,31 @@ def _rpc(connection, project, method, request_pb, response_pb_cls): return response_pb_cls.FromString(response) +def build_api_url(project, method, base_url): + """Construct the URL for a particular API call. + + This method is used internally to come up with the URL to use when + making RPCs to the Cloud Datastore API. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type method: str + :param method: The API method to call (e.g. 'runQuery', 'lookup'). + + :type base_url: str + :param base_url: The base URL where the API lives. + You shouldn't have to provide this. + + :rtype: str + :returns: The API URL created. + """ + return API_URL_TEMPLATE.format( + api_base=base_url, api_version=API_VERSION, + project=project, method=method) + + class _DatastoreAPIOverHttp(object): """Helper mapping datastore API methods. @@ -238,16 +270,6 @@ class Connection(connection_module.Connection): :param client: The client that owns the current connection. """ - API_BASE_URL = 'https://' + DATASTORE_API_HOST - """The base of the API call URL.""" - - API_VERSION = 'v1' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = ('{api_base}/{api_version}/projects' - '/{project}:{method}') - """A template for the URL of a particular API call.""" - def __init__(self, client): super(Connection, self).__init__(client) try: @@ -256,43 +278,13 @@ def __init__(self, client): secure = False except KeyError: self.host = DATASTORE_API_HOST - self.api_base_url = self.__class__.API_BASE_URL + self.api_base_url = API_BASE_URL secure = True if _USE_GRPC: self._datastore_api = _DatastoreAPIOverGRPC(self, secure=secure) else: self._datastore_api = _DatastoreAPIOverHttp(self) - def build_api_url(self, project, method, base_url=None, - api_version=None): - """Construct the URL for a particular API call. 
- - This method is used internally to come up with the URL to use when - making RPCs to the Cloud Datastore API. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type method: str - :param method: The API method to call (e.g. 'runQuery', 'lookup'). - - :type base_url: str - :param base_url: The base URL where the API lives. - You shouldn't have to provide this. - - :type api_version: str - :param api_version: The version of the API to connect to. - You shouldn't have to provide this. - - :rtype: str - :returns: The API URL created. - """ - return self.API_URL_TEMPLATE.format( - api_base=(base_url or self.api_base_url), - api_version=(api_version or self.API_VERSION), - project=project, method=method) - def lookup(self, project, key_pbs, eventual=False, transaction_id=None): """Lookup keys from a project in the Cloud Datastore. diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 69e5727346d9..5e89d0cc8454 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -46,18 +46,16 @@ def test_success(self): # Check that the mocks were called as expected. called_with = http._called_with self.assertEqual(len(called_with), 4) - self.assertEqual(called_with['uri'], uri) + self.assertTrue(called_with['uri'].startswith(uri)) self.assertEqual(called_with['method'], 'POST') expected_headers = { 'Content-Type': 'application/x-protobuf', - 'User-Agent': conn.USER_AGENT, + 'User-Agent': connection_module.DEFAULT_USER_AGENT, 'Content-Length': '4', connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } self.assertEqual(called_with['headers'], expected_headers) self.assertEqual(called_with['body'], data) - self.assertEqual(conn.build_kwargs, - [{'method': method, 'project': project}]) def test_failure(self): from google.cloud.exceptions import BadRequest @@ -71,6 +69,7 @@ def test_failure(self): # Make mock Connection object with canned response. conn = _Connection(uri) + conn.api_base_url = uri error = status_pb2.Status() error.message = 'Entity value is indexed.' error.code = code_pb2.FAILED_PRECONDITION @@ -83,8 +82,6 @@ def test_failure(self): # Check that the mocks were called as expected. expected_message = '400 Entity value is indexed.' 
self.assertEqual(str(exc.exception), expected_message) - self.assertEqual(conn.build_kwargs, - [{'method': method, 'project': project}]) class Test__rpc(unittest.TestCase): @@ -198,10 +195,22 @@ def _verify_protobuf_call(self, called_with, URI, conn): } self.assertEqual(called_with['headers'], expected_headers) + @staticmethod + def _build_expected_url(connection, project, method): + from google.cloud.datastore._http import API_VERSION + + return '/'.join([ + connection.api_base_url, + API_VERSION, + 'projects', + project + ':' + method, + ]) + def test_default_url(self): - klass = self._get_target_class() + from google.cloud.datastore._http import API_BASE_URL + conn = self._make_one(object()) - self.assertEqual(conn.api_base_url, klass.API_BASE_URL) + self.assertEqual(conn.api_base_url, API_BASE_URL) def test_custom_url_from_env(self): from google.cloud._http import API_BASE_URL @@ -259,33 +268,6 @@ def mock_api(connection, secure): self.assertIs(conn._datastore_api, return_val) self.assertEqual(api_args, [(conn, True)]) - def test_build_api_url_w_default_base_version(self): - PROJECT = 'PROJECT' - METHOD = 'METHOD' - conn = self._make_one(object()) - URI = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - PROJECT + ':' + METHOD, - ]) - self.assertEqual(conn.build_api_url(PROJECT, METHOD), URI) - - def test_build_api_url_w_explicit_base_version(self): - BASE = 'http://example.com/' - VER = '3.1415926' - PROJECT = 'PROJECT' - METHOD = 'METHOD' - conn = self._make_one(object()) - URI = '/'.join([ - BASE, - VER, - 'projects', - PROJECT + ':' + METHOD, - ]) - self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER), - URI) - def test_lookup_single_key_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -303,12 +285,7 @@ def test_lookup_single_key_empty_response(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -337,12 +314,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -383,12 +355,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -422,12 +389,7 @@ def test_lookup_single_key_nonempty_response(self): # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 1) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -460,12 +422,7 @@ def test_lookup_multiple_keys_empty_response(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -500,12 +457,7 @@ def test_lookup_multiple_keys_w_missing(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.deferred), 0) missing_keys = [result.entity.key for result in response.missing] @@ -542,12 +494,7 @@ def test_lookup_multiple_keys_w_deferred(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':lookup', - ]) + uri = self._build_expected_url(conn, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(list(response.deferred), [key_pb1, key_pb2]) @@ -593,12 +540,7 @@ def test_run_query_w_eventual_no_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':runQuery', - ]) + uri = self._build_expected_url(conn, project, 'runQuery') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.RunQueryRequest() @@ -635,12 +577,7 @@ def test_run_query_wo_eventual_w_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':runQuery', - ]) + uri = self._build_expected_url(conn, project, 'runQuery') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.RunQueryRequest() @@ -694,12 +631,7 @@ def test_run_query_wo_namespace_empty_result(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':runQuery', - ]) + uri = self._build_expected_url(conn, project, 'runQuery') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.RunQueryRequest() @@ -733,12 +665,7 @@ def test_run_query_w_namespace_nonempty_result(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) cw = http._called_with - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':runQuery', - ]) + uri = self._build_expected_url(conn, project, 'runQuery') self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) @@ -763,12 +690,7 @@ def test_begin_transaction(self): # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':beginTransaction', - ]) + uri = self._build_expected_url(conn, project, 'beginTransaction') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.BeginTransactionRequest() @@ -792,12 +714,7 @@ def test_commit_wo_transaction(self): http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':commit', - ]) + uri = self._build_expected_url(conn, project, 'commit') result = conn.commit(project, req_pb, None) self.assertEqual(result, rsp_pb) @@ -828,12 +745,7 @@ def test_commit_w_transaction(self): http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':commit', - ]) + uri = self._build_expected_url(conn, project, 'commit') result = conn.commit(project, req_pb, b'xact') self.assertEqual(result, rsp_pb) @@ -865,12 +777,7 @@ def test_rollback_ok(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':rollback', - ]) + uri = self._build_expected_url(conn, project, 'rollback') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.RollbackRequest() @@ -894,12 +801,7 @@ def test_allocate_ids_empty(self): # Check the result and verify the callers. self.assertEqual(list(response.keys), []) self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':allocateIds', - ]) + uri = self._build_expected_url(conn, project, 'allocateIds') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.AllocateIdsRequest() @@ -933,12 +835,7 @@ def test_allocate_ids_non_empty(self): # Check the result and verify the callers. self.assertEqual(list(response.keys), after_key_pbs) self.assertEqual(response, rsp_pb) - uri = '/'.join([ - conn.api_base_url, - conn.API_VERSION, - 'projects', - project + ':allocateIds', - ]) + uri = self._build_expected_url(conn, project, 'allocateIds') cw = http._called_with self._verify_protobuf_call(cw, uri, conn) request = datastore_pb2.AllocateIdsRequest() @@ -983,12 +880,7 @@ def request(self, **kw): class _Connection(object): host = None - USER_AGENT = 'you-sir-age-int' - - def __init__(self, api_url): - self.api_url = api_url - self.build_kwargs = [] + http = None - def build_api_url(self, **kwargs): - self.build_kwargs.append(kwargs) - return self.api_url + def __init__(self, api_base_url): + self.api_base_url = api_base_url From 5dc68b5065adf13e32c13fec05ef48b0a04e8d96 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 6 Mar 2017 12:30:16 -0800 Subject: [PATCH 089/611] Only requiring HTTP object to make datastore RPC-over-HTTP. 
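[Annotation] This commit narrows the transport helpers so they need only an HTTP object and a base URL rather than a whole Connection. A minimal sketch of the new calling convention, assuming httplib2 as the transport (as the library did at the time); the argument values are illustrative, and a real call would also need authorized credentials on the HTTP object:

    import httplib2

    from google.cloud.datastore import _http
    from google.cloud.proto.datastore.v1 import datastore_pb2

    http = httplib2.Http()  # would need credentials in practice
    request_pb = datastore_pb2.BeginTransactionRequest(
        project_id='my-project')

    # New signature from this patch:
    #     _rpc(http, project, method, base_url, request_pb, response_pb_cls)
    response_pb = _http._rpc(
        http, 'my-project', 'beginTransaction',
        'https://datastore.googleapis.com',
        request_pb, datastore_pb2.BeginTransactionResponse)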
--- .../google/cloud/datastore/_http.py | 60 +++++++++++-------- .../unit_tests/test__http.py | 43 ++++++------- 2 files changed, 52 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 3a1b4adb0e65..22bd0cb19bc1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -48,12 +48,11 @@ _CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(__version__) -def _request(connection, project, method, data): +def _request(http, project, method, data, base_url): """Make a request over the Http transport to the Cloud Datastore API. - :type connection: :class:`Connection` - :param connection: A connection object that contains helpful - information for making requests. + :type http: :class:`~httplib2.Http` + :param http: HTTP object to make requests. :type project: str :param project: The project to make the request for. @@ -66,6 +65,9 @@ def _request(connection, project, method, data): :param data: The data to send with the API call. Typically this is a serialized Protobuf string. + :type base_url: str + :param base_url: The base URL where the API lives. + :rtype: str :returns: The string response content from the API call. :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the @@ -77,8 +79,8 @@ def _request(connection, project, method, data): 'User-Agent': connection_module.DEFAULT_USER_AGENT, connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } - api_url = build_api_url(project, method, connection.api_base_url) - headers, content = connection.http.request( + api_url = build_api_url(project, method, base_url) + headers, content = http.request( uri=api_url, method='POST', headers=headers, body=data) status = headers['status'] @@ -90,12 +92,11 @@ def _request(connection, project, method, data): return content -def _rpc(connection, project, method, request_pb, response_pb_cls): +def _rpc(http, project, method, base_url, request_pb, response_pb_cls): """Make a protobuf RPC request. - :type connection: :class:`Connection` - :param connection: A connection object that contains helpful - information for making requests. + :type http: :class:`~httplib2.Http` + :param http: HTTP object to make requests. :type project: str :param project: The project to connect to. This is @@ -104,6 +105,9 @@ def _rpc(connection, project, method, request_pb, response_pb_cls): :type method: str :param method: The name of the method to invoke. + :type base_url: str + :param base_url: The base URL where the API lives. + :type request_pb: :class:`google.protobuf.message.Message` instance :param request_pb: the protobuf instance representing the request. @@ -115,8 +119,9 @@ def _rpc(connection, project, method, request_pb, response_pb_cls): :rtype: :class:`google.protobuf.message.Message` :returns: The RPC message parsed from the response. """ - response = _request(connection=connection, project=project, - method=method, data=request_pb.SerializeToString()) + req_data = request_pb.SerializeToString() + response = _request( + http, project, method, req_data, base_url) return response_pb_cls.FromString(response) @@ -135,7 +140,6 @@ def build_api_url(project, method, base_url): :type base_url: str :param base_url: The base URL where the API lives. - You shouldn't have to provide this. :rtype: str :returns: The API URL created. 
@@ -174,8 +178,9 @@ def lookup(self, project, request_pb): :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ - return _rpc(self.connection, project, 'lookup', request_pb, - _datastore_pb2.LookupResponse) + return _rpc(self.connection.http, project, 'lookup', + self.connection.api_base_url, + request_pb, _datastore_pb2.LookupResponse) def run_query(self, project, request_pb): """Perform a ``runQuery`` request. @@ -190,8 +195,9 @@ def run_query(self, project, request_pb): :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ - return _rpc(self.connection, project, 'runQuery', request_pb, - _datastore_pb2.RunQueryResponse) + return _rpc(self.connection.http, project, 'runQuery', + self.connection.api_base_url, + request_pb, _datastore_pb2.RunQueryResponse) def begin_transaction(self, project, request_pb): """Perform a ``beginTransaction`` request. @@ -207,8 +213,9 @@ def begin_transaction(self, project, request_pb): :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ - return _rpc(self.connection, project, 'beginTransaction', request_pb, - _datastore_pb2.BeginTransactionResponse) + return _rpc(self.connection.http, project, 'beginTransaction', + self.connection.api_base_url, + request_pb, _datastore_pb2.BeginTransactionResponse) def commit(self, project, request_pb): """Perform a ``commit`` request. @@ -223,8 +230,9 @@ def commit(self, project, request_pb): :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ - return _rpc(self.connection, project, 'commit', request_pb, - _datastore_pb2.CommitResponse) + return _rpc(self.connection.http, project, 'commit', + self.connection.api_base_url, + request_pb, _datastore_pb2.CommitResponse) def rollback(self, project, request_pb): """Perform a ``rollback`` request. @@ -239,8 +247,9 @@ def rollback(self, project, request_pb): :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ - return _rpc(self.connection, project, 'rollback', request_pb, - _datastore_pb2.RollbackResponse) + return _rpc(self.connection.http, project, 'rollback', + self.connection.api_base_url, + request_pb, _datastore_pb2.RollbackResponse) def allocate_ids(self, project, request_pb): """Perform an ``allocateIds`` request. @@ -255,8 +264,9 @@ def allocate_ids(self, project, request_pb): :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ - return _rpc(self.connection, project, 'allocateIds', request_pb, - _datastore_pb2.AllocateIdsResponse) + return _rpc(self.connection.http, project, 'allocateIds', + self.connection.api_base_url, + request_pb, _datastore_pb2.AllocateIdsResponse) class Connection(connection_module.Connection): diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 5e89d0cc8454..b5ad973f9a92 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -34,13 +34,12 @@ def test_success(self): data = b'DATA' uri = 'http://api-url' - # Make mock Connection object with canned response. - conn = _Connection(uri) + # Make mock HTTP object with canned response. 
response_data = 'CONTENT' - http = conn.http = Http({'status': '200'}, response_data) + http = Http({'status': '200'}, response_data) # Call actual function under test. - response = self._call_fut(conn, project, method, data) + response = self._call_fut(http, project, method, data, uri) self.assertEqual(response, response_data) # Check that the mocks were called as expected. @@ -67,17 +66,15 @@ def test_failure(self): data = 'DATA' uri = 'http://api-url' - # Make mock Connection object with canned response. - conn = _Connection(uri) - conn.api_base_url = uri + # Make mock HTTP object with canned response. error = status_pb2.Status() error.message = 'Entity value is indexed.' error.code = code_pb2.FAILED_PRECONDITION - conn.http = Http({'status': '400'}, error.SerializeToString()) + http = Http({'status': '400'}, error.SerializeToString()) # Call actual function under test. with self.assertRaises(BadRequest) as exc: - self._call_fut(conn, project, method, data) + self._call_fut(http, project, method, data, uri) # Check that the mocks were called as expected. expected_message = '400 Entity value is indexed.' @@ -95,9 +92,10 @@ def _call_fut(*args, **kwargs): def test_it(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - connection = object() + http = object() project = 'projectOK' method = 'beginTransaction' + base_url = 'test.invalid' request_pb = datastore_pb2.BeginTransactionRequest( project_id=project) @@ -107,13 +105,13 @@ def test_it(self): return_value=response_pb.SerializeToString()) with patch as mock_request: result = self._call_fut( - connection, project, method, request_pb, - datastore_pb2.BeginTransactionResponse) + http, project, method, base_url, + request_pb, datastore_pb2.BeginTransactionResponse) self.assertEqual(result, response_pb) mock_request.assert_called_once_with( - connection=connection, data=request_pb.SerializeToString(), - method=method, project=project) + http, project, method, request_pb.SerializeToString(), + base_url) class Test_DatastoreAPIOverHttp(unittest.TestCase): @@ -135,7 +133,8 @@ def test_constructor(self): def test_lookup(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - connection = object() + connection = mock.Mock( + api_base_url='test.invalid', spec=['http', 'api_base_url']) ds_api = self._make_one(connection) project = 'project' @@ -149,8 +148,9 @@ def test_lookup(self): self.assertIs(result, mock.sentinel.looked_up) mock_rpc.assert_called_once_with( - connection, project, 'lookup', request_pb, - datastore_pb2.LookupResponse) + connection.http, project, 'lookup', + connection.api_base_url, + request_pb, datastore_pb2.LookupResponse) class TestConnection(unittest.TestCase): @@ -875,12 +875,3 @@ def __init__(self, headers, content): def request(self, **kw): self._called_with = kw return self._response, self._content - - -class _Connection(object): - - host = None - http = None - - def __init__(self, api_base_url): - self.api_base_url = api_base_url From 55e0e494daba18870f47d542a0302fc84a1221b7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 6 Mar 2017 13:37:12 -0800 Subject: [PATCH 090/611] Moving datastore host configuration into Client. This is progress towards #2746 and makes Connection() act as a proxy for the contents of Client (as intended). 
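[Annotation] This commit moves emulator-host handling out of Connection.__init__ and into Client.__init__, so the Connection simply mirrors client._base_url. A sketch of the resolution logic added in the diff below; _resolve_base_url is a hypothetical name used here for illustration, but the try/except body matches the patched constructor:

    import os

    from google.cloud.environment_vars import GCD_HOST

    _DATASTORE_BASE_URL = 'https://datastore.googleapis.com'


    def _resolve_base_url():
        # Hypothetical helper: an emulator host from the environment
        # wins; otherwise fall back to the production endpoint
        # (mirrors the logic added to Client.__init__).
        try:
            return 'http://' + os.environ[GCD_HOST]
        except KeyError:
            return _DATASTORE_BASE_URL

Resolving the URL once on the client also lets the gRPC path reuse it: the patched _DatastoreAPIOverGRPC parses connection.api_base_url and picks a secure or insecure stub from the scheme instead of taking a separate `secure` flag.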
--- .../google/cloud/datastore/_gax.py | 19 ++-- .../google/cloud/datastore/_http.py | 12 +-- .../google/cloud/datastore/client.py | 12 ++- .../unit_tests/test__gax.py | 40 +++---- .../unit_tests/test__http.py | 101 +++++++++--------- .../unit_tests/test_client.py | 39 +++++-- 6 files changed, 124 insertions(+), 99 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index 1bf7b21fbe5f..155a02a3fe7e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -21,6 +21,7 @@ from google.cloud.proto.datastore.v1 import datastore_pb2_grpc from google.gax.utils import metrics from grpc import StatusCode +import six from google.cloud._helpers import make_insecure_stub from google.cloud._helpers import make_secure_channel @@ -90,20 +91,20 @@ class _DatastoreAPIOverGRPC(object): :type connection: :class:`Connection` :param connection: A connection object that contains helpful information for making requests. - - :type secure: bool - :param secure: Flag indicating if a secure stub connection is needed. """ - def __init__(self, connection, secure): - if secure: + def __init__(self, connection): + parse_result = six.moves.urllib_parse.urlparse( + connection.api_base_url) + host = parse_result.hostname + if parse_result.scheme == 'https': self._stub = make_secure_stub( - connection.credentials, connection.USER_AGENT, - datastore_pb2_grpc.DatastoreStub, connection.host, + connection.credentials, DEFAULT_USER_AGENT, + datastore_pb2_grpc.DatastoreStub, host, extra_options=_GRPC_EXTRA_OPTIONS) else: - self._stub = make_insecure_stub(datastore_pb2_grpc.DatastoreStub, - connection.host) + self._stub = make_insecure_stub( + datastore_pb2_grpc.DatastoreStub, host) def lookup(self, project, request_pb): """Perform a ``lookup`` request. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 22bd0cb19bc1..37a3cb1be5f9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -20,7 +20,6 @@ from google.cloud import _http as connection_module from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.environment_vars import GCD_HOST from google.cloud import exceptions from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 @@ -282,16 +281,9 @@ class Connection(connection_module.Connection): def __init__(self, client): super(Connection, self).__init__(client) - try: - self.host = os.environ[GCD_HOST] - self.api_base_url = 'http://' + self.host - secure = False - except KeyError: - self.host = DATASTORE_API_HOST - self.api_base_url = API_BASE_URL - secure = True + self.api_base_url = client._base_url if _USE_GRPC: - self._datastore_api = _DatastoreAPIOverGRPC(self, secure=secure) + self._datastore_api = _DatastoreAPIOverGRPC(self) else: self._datastore_api = _DatastoreAPIOverHttp(self) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index ce924edda183..21fc8a0aa407 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -21,6 +21,7 @@ from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_DATASET +from google.cloud.environment_vars import GCD_HOST from google.cloud.datastore._http import Connection from google.cloud.datastore._http import HTTPDatastoreAPI @@ -40,6 +41,8 @@ _MAX_LOOPS = 128 """Maximum number of iterations to wait for deferred keys.""" +_DATASTORE_BASE_URL = 'https://datastore.googleapis.com' +"""Datastore API request URL base.""" _USE_GAX = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) @@ -195,7 +198,6 @@ def __init__(self, project=None, namespace=None, credentials=None, http=None, use_gax=None): super(Client, self).__init__( project=project, credentials=credentials, http=http) - self._connection = Connection(self) self.namespace = namespace self._batch_stack = _LocalStack() self._datastore_api_internal = None @@ -203,6 +205,14 @@ def __init__(self, project=None, namespace=None, self._use_gax = _USE_GAX else: self._use_gax = use_gax + try: + host = os.environ[GCD_HOST] + self._base_url = 'http://' + host + except KeyError: + self._base_url = _DATASTORE_BASE_URL + # NOTE: Make sure all properties are set before passing to + # ``Connection`` (e.g. ``_base_url``). 
+ self._connection = Connection(self) @staticmethod def _determine_default(project): diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index aea60801cfe5..b6882f11f3b7 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -19,9 +19,6 @@ from google.cloud.datastore._http import _HAVE_GRPC -USER_AGENT = 'you-sir-age-int' - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') class Test__grpc_catch_rendezvous(unittest.TestCase): @@ -98,35 +95,37 @@ def _get_target_class(): return _DatastoreAPIOverGRPC def _make_one(self, stub, connection=None, secure=True): - if connection is None: - connection = mock.Mock( - credentials=object(), - host='CURR_HOST', - USER_AGENT=USER_AGENT, - spec=['credentials', 'host', 'USER_AGENT'], - ) - if secure: patch = mock.patch( 'google.cloud.datastore._gax.make_secure_stub', return_value=stub) + base_url = 'https://test.invalid' else: patch = mock.patch( 'google.cloud.datastore._gax.make_insecure_stub', return_value=stub) + base_url = 'http://test.invalid' + + if connection is None: + connection = mock.Mock( + credentials=object(), + api_base_url=base_url, + spec=['credentials', 'api_base_url'], + ) with patch as make_stub_mock: - api_obj = self._get_target_class()(connection, secure) + api_obj = self._get_target_class()(connection) return api_obj, make_stub_mock def test_constructor(self): + from google.cloud._http import DEFAULT_USER_AGENT import google.cloud.datastore._gax as MUT + host = 'test.invalid' conn = mock.Mock( credentials=object(), - host='CURR_HOST', - USER_AGENT=USER_AGENT, - spec=['credentials', 'host', 'USER_AGENT'], + api_base_url='https://' + host, + spec=['credentials', 'api_base_url'], ) stub = _GRPCStub() @@ -136,19 +135,20 @@ def test_constructor(self): self.assertIs(datastore_api._stub, stub) make_stub_mock.assert_called_once_with( conn.credentials, - conn.USER_AGENT, + DEFAULT_USER_AGENT, MUT.datastore_pb2_grpc.DatastoreStub, - conn.host, + host, extra_options=MUT._GRPC_EXTRA_OPTIONS, ) def test_constructor_insecure(self): from google.cloud.proto.datastore.v1 import datastore_pb2_grpc + host = 'test.invalid' conn = mock.Mock( credentials=object(), - host='CURR_HOST:1234', - spec=['credentials', 'host'], + api_base_url='http://' + host, + spec=['credentials', 'api_base_url'], ) stub = _GRPCStub() @@ -158,7 +158,7 @@ def test_constructor_insecure(self): self.assertIs(datastore_api._stub, stub) make_stub_mock.assert_called_once_with( datastore_pb2_grpc.DatastoreStub, - conn.host, + host, ) def test_lookup(self): diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index b5ad973f9a92..e81abf6cf80c 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -206,33 +206,19 @@ def _build_expected_url(connection, project, method): project + ':' + method, ]) - def test_default_url(self): - from google.cloud.datastore._http import API_BASE_URL - - conn = self._make_one(object()) - self.assertEqual(conn.api_base_url, API_BASE_URL) - - def test_custom_url_from_env(self): - from google.cloud._http import API_BASE_URL - from google.cloud.environment_vars import GCD_HOST - - HOST = 'CURR_HOST' - fake_environ = {GCD_HOST: HOST} - - with mock.patch('os.environ', new=fake_environ): - conn = self._make_one(object()) - - self.assertNotEqual(conn.api_base_url, 
API_BASE_URL) - self.assertEqual(conn.api_base_url, 'http://' + HOST) + def test_inherited_url(self): + client = mock.Mock(_base_url='test.invalid', spec=['_base_url']) + conn = self._make_one(client) + self.assertEqual(conn.api_base_url, client._base_url) def test_constructor(self): - client = object() + client = mock.Mock(spec=['_base_url']) conn = self._make_one(client) self.assertIs(conn._client, client) def test_constructor_without_grpc(self): connections = [] - client = object() + client = mock.Mock(spec=['_base_url']) return_val = object() def mock_api(connection): @@ -250,23 +236,17 @@ def mock_api(connection): self.assertEqual(connections, [conn]) def test_constructor_with_grpc(self): - api_args = [] - client = object() - return_val = object() - - def mock_api(connection, secure): - api_args.append((connection, secure)) - return return_val + client = mock.Mock(spec=['_base_url']) patch = mock.patch( 'google.cloud.datastore._http._DatastoreAPIOverGRPC', - new=mock_api) - with patch: + return_value=mock.sentinel.ds_api) + with patch as mock_klass: conn = self._make_one(client, use_grpc=True) + mock_klass.assert_called_once_with(conn) self.assertIs(conn._client, client) - self.assertIs(conn._datastore_api, return_val) - self.assertEqual(api_args, [(conn, True)]) + self.assertIs(conn._datastore_api, mock.sentinel.ds_api) def test_lookup_single_key_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -277,7 +257,8 @@ def test_lookup_single_key_empty_response(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -306,7 +287,8 @@ def test_lookup_single_key_empty_response_w_eventual(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -333,7 +315,9 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' key_pb = self._make_key_pb(PROJECT) - conn = self._make_one(object()) + + client = mock.Mock(spec=['_base_url']) + conn = self._make_one(client) self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb, eventual=True, transaction_id=TRANSACTION) @@ -347,7 +331,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -381,7 +366,8 @@ def test_lookup_single_key_nonempty_response(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -414,7 +400,8 @@ def test_lookup_multiple_keys_empty_response(self): # Create mock HTTP and client with response. 
http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -449,7 +436,8 @@ def test_lookup_multiple_keys_w_missing(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -486,7 +474,8 @@ def test_lookup_multiple_keys_w_deferred(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -532,7 +521,8 @@ def test_run_query_w_eventual_no_transaction(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -568,7 +558,8 @@ def test_run_query_wo_eventual_w_transaction(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -603,7 +594,9 @@ def test_run_query_w_eventual_and_transaction(self): no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL - conn = self._make_one(object()) + + client = mock.Mock(spec=['_base_url']) + conn = self._make_one(client) self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb, eventual=True, transaction_id=TRANSACTION) @@ -623,7 +616,8 @@ def test_run_query_wo_namespace_empty_result(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -655,7 +649,8 @@ def test_run_query_w_namespace_nonempty_result(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -682,7 +677,8 @@ def test_begin_transaction(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. 
conn = self._make_one(client) @@ -712,7 +708,8 @@ def test_commit_wo_transaction(self): value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) conn = self._make_one(client) uri = self._build_expected_url(conn, project, 'commit') @@ -743,7 +740,8 @@ def test_commit_w_transaction(self): value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) conn = self._make_one(client) uri = self._build_expected_url(conn, project, 'commit') @@ -769,7 +767,8 @@ def test_rollback_ok(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -792,7 +791,8 @@ def test_allocate_ids_empty(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. conn = self._make_one(client) @@ -826,7 +826,8 @@ def test_allocate_ids_non_empty(self): # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock( + _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. 
conn = self._make_one(client) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 63c2e0adebeb..a3abaf32cae8 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -155,13 +155,15 @@ def test_constructor_w_project_no_environ(self): self.assertRaises(EnvironmentError, self._make_one, None) def test_constructor_w_implicit_inputs(self): - OTHER = 'other' + from google.cloud.datastore.client import _DATASTORE_BASE_URL + + other = 'other' creds = _make_credentials() default_called = [] def fallback_mock(project): default_called.append(project) - return project or OTHER + return project or other klass = self._get_target_class() patch1 = mock.patch( @@ -175,31 +177,36 @@ def fallback_mock(project): with patch2: client = klass() - self.assertEqual(client.project, OTHER) + self.assertEqual(client.project, other) self.assertIsNone(client.namespace) self.assertIsInstance(client._connection, _MockConnection) self.assertIs(client._credentials, creds) self.assertIsNone(client._http_internal) + self.assertEqual(client._base_url, _DATASTORE_BASE_URL) + self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) self.assertEqual(default_called, [None]) def test_constructor_w_explicit_inputs(self): - OTHER = 'other' - NAMESPACE = 'namespace' + from google.cloud.datastore.client import _DATASTORE_BASE_URL + + other = 'other' + namespace = 'namespace' creds = _make_credentials() http = object() - client = self._make_one(project=OTHER, - namespace=NAMESPACE, + client = self._make_one(project=other, + namespace=namespace, credentials=creds, http=http) - self.assertEqual(client.project, OTHER) - self.assertEqual(client.namespace, NAMESPACE) + self.assertEqual(client.project, other) + self.assertEqual(client.namespace, namespace) self.assertIsInstance(client._connection, _MockConnection) self.assertIs(client._credentials, creds) self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) self.assertEqual(list(client._batch_stack), []) + self.assertEqual(client._base_url, _DATASTORE_BASE_URL) def test_constructor_use_gax_default(self): import google.cloud.datastore.client as MUT @@ -228,6 +235,20 @@ def test_constructor_use_gax_default(self): use_gax=True) self.assertTrue(client4._use_gax) + def test_constructor_gcd_host(self): + from google.cloud.environment_vars import GCD_HOST + + host = 'localhost:1234' + fake_environ = {GCD_HOST: host} + project = 'PROJECT' + creds = _make_credentials() + http = object() + + with mock.patch('os.environ', new=fake_environ): + client = self._make_one( + project=project, credentials=creds, http=http) + self.assertEqual(client._base_url, 'http://' + host) + def test__datastore_api_property_gax(self): client = self._make_one( project='prahj-ekt', credentials=_make_credentials(), From fca184c5c0ec701cd837b0f836942a6280030085 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 8 Mar 2017 09:54:47 -0800 Subject: [PATCH 091/611] Using GAPIC datastore object (and an HTTP equivalent) for allocate_ids. 
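[Annotation] This commit is the first step of migrating RPC methods off Connection: allocate_ids now lives on HTTPDatastoreAPI, and Client.allocate_ids reaches it through self._datastore_api. A short usage sketch under the post-patch layout; the project and kind names are illustrative:

    from google.cloud import datastore

    client = datastore.Client(project='my-project')
    incomplete_key = client.key('Task')  # partial key: kind only, no id

    # Internally this now calls
    # client._datastore_api.allocate_ids(project, key_pbs)
    # rather than going through client._connection.
    complete_keys = client.allocate_ids(incomplete_key, 3)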
--- .../google/cloud/datastore/_http.py | 55 ++--- .../google/cloud/datastore/client.py | 3 +- .../unit_tests/test__http.py | 213 +++++++++--------- .../unit_tests/test_client.py | 15 +- 4 files changed, 134 insertions(+), 152 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 37a3cb1be5f9..239dfc5c8cc2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -250,23 +250,6 @@ def rollback(self, project, request_pb): self.connection.api_base_url, request_pb, _datastore_pb2.RollbackResponse) - def allocate_ids(self, project, request_pb): - """Perform an ``allocateIds`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.AllocateIdsResponse` - :returns: The returned protobuf response object. - """ - return _rpc(self.connection.http, project, 'allocateIds', - self.connection.api_base_url, - request_pb, _datastore_pb2.AllocateIdsResponse) - class Connection(connection_module.Connection): """A connection to the Google Cloud Datastore via the Protobuf API. @@ -451,25 +434,6 @@ def rollback(self, project, transaction_id): # Response is empty (i.e. no fields) but we return it anyway. return self._datastore_api.rollback(project, request) - def allocate_ids(self, project, key_pbs): - """Obtain backend-generated IDs for a set of keys. - - Maps the ``DatastoreService.AllocateIds`` protobuf RPC. - - :type project: str - :param project: The project to which the transaction belongs. - - :type key_pbs: list of - :class:`.entity_pb2.Key` - :param key_pbs: The keys for which the backend should allocate IDs. - - :rtype: :class:`.datastore_pb2.AllocateIdsResponse` - :returns: The protobuf response from an allocate IDs request. - """ - request = _datastore_pb2.AllocateIdsRequest() - _add_keys_to_request(request.keys, key_pbs) - return self._datastore_api.allocate_ids(project, request) - class HTTPDatastoreAPI(object): """An API object that sends proto-over-HTTP requests. @@ -483,6 +447,25 @@ class HTTPDatastoreAPI(object): def __init__(self, client): self.client = client + def allocate_ids(self, project, key_pbs): + """Perform an ``allocateIds`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type key_pbs: list of :class:`.entity_pb2.Key` + :param key_pbs: The keys for which the backend should allocate IDs. + + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` + :returns: The returned protobuf response object. + """ + request_pb = _datastore_pb2.AllocateIdsRequest() + _add_keys_to_request(request_pb.keys, key_pbs) + return _rpc(self.client._http, project, 'allocateIds', + self.client._base_url, + request_pb, _datastore_pb2.AllocateIdsResponse) + def _set_read_options(request, eventual, transaction_id): """Validate rules for read options, and assign to the request. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 21fc8a0aa407..34f14735a592 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -467,8 +467,7 @@ def allocate_ids(self, incomplete_key, num_ids): incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids - conn = self._connection - response_pb = conn.allocate_ids( + response_pb = self._datastore_api.allocate_ids( incomplete_key.project, incomplete_key_pbs) allocated_ids = [allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys] diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index e81abf6cf80c..30a32741de81 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -161,14 +161,6 @@ def _get_target_class(): return Connection - def _make_key_pb(self, project, id_=1234): - from google.cloud.datastore.key import Key - - path_args = ('Kind',) - if id_ is not None: - path_args += (id_,) - return Key(*path_args, project=project).to_protobuf() - def _make_query_pb(self, kind): from google.cloud.proto.datastore.v1 import query_pb2 @@ -181,31 +173,6 @@ def _make_one(self, client, use_grpc=False): new=use_grpc): return self._get_target_class()(client) - def _verify_protobuf_call(self, called_with, URI, conn): - from google.cloud import _http as connection_module - from google.cloud.datastore._http import _CLIENT_INFO - - self.assertEqual(called_with['uri'], URI) - self.assertEqual(called_with['method'], 'POST') - expected_headers = { - 'Content-Type': 'application/x-protobuf', - 'User-Agent': conn.USER_AGENT, - 'Content-Length': str(len(called_with['body'])), - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - self.assertEqual(called_with['headers'], expected_headers) - - @staticmethod - def _build_expected_url(connection, project, method): - from google.cloud.datastore._http import API_VERSION - - return '/'.join([ - connection.api_base_url, - API_VERSION, - 'projects', - project + ':' + method, - ]) - def test_inherited_url(self): client = mock.Mock(_base_url='test.invalid', spec=['_base_url']) conn = self._make_one(client) @@ -252,7 +219,7 @@ def test_lookup_single_key_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 project = 'PROJECT' - key_pb = self._make_key_pb(project) + key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() # Create mock HTTP and client with response. @@ -266,12 +233,12 @@ def test_lookup_single_key_empty_response(self): # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) @@ -282,7 +249,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): from google.cloud.proto.datastore.v1 import datastore_pb2 project = 'PROJECT' - key_pb = self._make_key_pb(project) + key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() # Create mock HTTP and client with response. @@ -296,12 +263,12 @@ def test_lookup_single_key_empty_response_w_eventual(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) @@ -314,7 +281,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' - key_pb = self._make_key_pb(PROJECT) + key_pb = _make_key_pb(PROJECT) client = mock.Mock(spec=['_base_url']) conn = self._make_one(client) @@ -326,7 +293,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): project = 'PROJECT' transaction = b'TRANSACTION' - key_pb = self._make_key_pb(project) + key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() # Create mock HTTP and client with response. @@ -340,12 +307,12 @@ def test_lookup_single_key_empty_response_w_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) @@ -358,7 +325,7 @@ def test_lookup_single_key_nonempty_response(self): from google.cloud.proto.datastore.v1 import entity_pb2 project = 'PROJECT' - key_pb = self._make_key_pb(project) + key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() entity = entity_pb2.Entity() entity.key.CopyFrom(key_pb) @@ -375,7 +342,7 @@ def test_lookup_single_key_nonempty_response(self): # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 1) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -383,7 +350,7 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(found.key.path[0].kind, 'Kind') self.assertEqual(found.key.path[0].id, 1234) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) @@ -394,8 +361,8 @@ def test_lookup_multiple_keys_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 project = 'PROJECT' - key_pb1 = self._make_key_pb(project) - key_pb2 = self._make_key_pb(project, id_=2345) + key_pb1 = _make_key_pb(project) + key_pb2 = _make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() # Create mock HTTP and client with response. @@ -409,12 +376,12 @@ def test_lookup_multiple_keys_empty_response(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) @@ -426,8 +393,8 @@ def test_lookup_multiple_keys_w_missing(self): from google.cloud.proto.datastore.v1 import datastore_pb2 project = 'PROJECT' - key_pb1 = self._make_key_pb(project) - key_pb2 = self._make_key_pb(project, id_=2345) + key_pb1 = _make_key_pb(project) + key_pb2 = _make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() er_1 = rsp_pb.missing.add() er_1.entity.key.CopyFrom(key_pb1) @@ -445,13 +412,13 @@ def test_lookup_multiple_keys_w_missing(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.deferred), 0) missing_keys = [result.entity.key for result in response.missing] self.assertEqual(missing_keys, [key_pb1, key_pb2]) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) keys = list(request.keys) @@ -466,8 +433,8 @@ def test_lookup_multiple_keys_w_deferred(self): from google.cloud.datastore._http import _CLIENT_INFO project = 'PROJECT' - key_pb1 = self._make_key_pb(project) - key_pb2 = self._make_key_pb(project, id_=2345) + key_pb1 = _make_key_pb(project) + key_pb2 = _make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) @@ -483,12 +450,12 @@ def test_lookup_multiple_keys_w_deferred(self): # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'lookup') + uri = _build_expected_url(conn.api_base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(list(response.deferred), [key_pb1, key_pb2]) cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) self.assertEqual(cw['uri'], uri) self.assertEqual(cw['method'], 'POST') expected_headers = { @@ -530,9 +497,9 @@ def test_run_query_w_eventual_no_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'runQuery') + uri = _build_expected_url(conn.api_base_url, project, 'runQuery') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') @@ -568,9 +535,9 @@ def test_run_query_wo_eventual_w_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'runQuery') + uri = _build_expected_url(conn.api_base_url, project, 'runQuery') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') @@ -625,9 +592,9 @@ def test_run_query_wo_namespace_empty_result(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'runQuery') + uri = _build_expected_url(conn.api_base_url, project, 'runQuery') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') @@ -660,8 +627,8 @@ def test_run_query_w_namespace_nonempty_result(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) cw = http._called_with - uri = self._build_expected_url(conn, project, 'runQuery') - self._verify_protobuf_call(cw, uri, conn) + uri = _build_expected_url(conn.api_base_url, project, 'runQuery') + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, namespace) @@ -686,9 +653,10 @@ def test_begin_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'beginTransaction') + uri = _build_expected_url( + conn.api_base_url, project, 'beginTransaction') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.BeginTransactionRequest() request.ParseFromString(cw['body']) # The RPC-over-HTTP request does not set the project in the request. 
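For orientation: the module-level helper that replaces the old ``self._build_expected_url`` method simply joins the base URL, the API version, and the project-qualified RPC name. A standalone sketch of the URL shape these assertions rely on (assuming the package's ``API_VERSION`` is ``'v1'``; the helper's real definition, shown further down, imports it from ``google.cloud.datastore._http``):

    def build_expected_url(api_base_url, project, method, api_version='v1'):
        # Same shape as the test helper:
        #     <base>/<version>/projects/<project>:<method>
        return '/'.join(
            [api_base_url, api_version, 'projects', project + ':' + method])

    assert (build_expected_url('test.invalid', 'PROJECT', 'lookup')
            == 'test.invalid/v1/projects/PROJECT:lookup')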
@@ -699,7 +667,7 @@ def test_commit_wo_transaction(self): from google.cloud.datastore.helpers import _new_value_pb project = 'PROJECT' - key_pb = self._make_key_pb(project) + key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() mutation = req_pb.mutations.add() @@ -711,14 +679,14 @@ def test_commit_wo_transaction(self): client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) conn = self._make_one(client) - uri = self._build_expected_url(conn, project, 'commit') + uri = _build_expected_url(conn.api_base_url, project, 'commit') result = conn.commit(project, req_pb, None) self.assertEqual(result, rsp_pb) # Verify the caller. cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -731,7 +699,7 @@ def test_commit_w_transaction(self): from google.cloud.datastore.helpers import _new_value_pb project = 'PROJECT' - key_pb = self._make_key_pb(project) + key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() mutation = req_pb.mutations.add() @@ -743,14 +711,14 @@ def test_commit_w_transaction(self): client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) conn = self._make_one(client) - uri = self._build_expected_url(conn, project, 'commit') + uri = _build_expected_url(conn.api_base_url, project, 'commit') result = conn.commit(project, req_pb, b'xact') self.assertEqual(result, rsp_pb) # Verify the caller. cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -776,13 +744,30 @@ def test_rollback_ok(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'rollback') + uri = _build_expected_url(conn.api_base_url, project, 'rollback') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RollbackRequest() request.ParseFromString(cw['body']) self.assertEqual(request.transaction, transaction) + +class TestHTTPDatastoreAPI(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.datastore._http import HTTPDatastoreAPI + + return HTTPDatastoreAPI + + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) + + def test_constructor(self): + client = object() + ds_api = self._make_one(client) + self.assertIs(ds_api.client, client) + def test_allocate_ids_empty(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -795,15 +780,15 @@ def test_allocate_ids_empty(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.allocate_ids(project, []) + ds_api = self._make_one(client) + response = ds_api.allocate_ids(project, []) # Check the result and verify the callers. 
self.assertEqual(list(response.keys), []) self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'allocateIds') + uri = _build_expected_url(client._base_url, project, 'allocateIds') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.AllocateIdsRequest() request.ParseFromString(cw['body']) self.assertEqual(list(request.keys), []) @@ -813,12 +798,12 @@ def test_allocate_ids_non_empty(self): project = 'PROJECT' before_key_pbs = [ - self._make_key_pb(project, id_=None), - self._make_key_pb(project, id_=None), + _make_key_pb(project, id_=None), + _make_key_pb(project, id_=None), ] after_key_pbs = [ - self._make_key_pb(project), - self._make_key_pb(project, id_=2345), + _make_key_pb(project), + _make_key_pb(project, id_=2345), ] rsp_pb = datastore_pb2.AllocateIdsResponse() rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) @@ -830,15 +815,15 @@ def test_allocate_ids_non_empty(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.allocate_ids(project, before_key_pbs) + ds_api = self._make_one(client) + response = ds_api.allocate_ids(project, before_key_pbs) # Check the result and verify the callers. self.assertEqual(list(response.keys), after_key_pbs) self.assertEqual(response, rsp_pb) - uri = self._build_expected_url(conn, project, 'allocateIds') + uri = _build_expected_url(client._base_url, project, 'allocateIds') cw = http._called_with - self._verify_protobuf_call(cw, uri, conn) + _verify_protobuf_call(self, cw, uri) request = datastore_pb2.AllocateIdsRequest() request.ParseFromString(cw['body']) self.assertEqual(len(request.keys), len(before_key_pbs)) @@ -846,23 +831,6 @@ def test_allocate_ids_non_empty(self): self.assertEqual(key_before, key_after) -class TestHTTPDatastoreAPI(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import HTTPDatastoreAPI - - return HTTPDatastoreAPI - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - client = object() - ds_api = self._make_one(client) - self.assertIs(ds_api.client, client) - - class Http(object): _called_with = None @@ -876,3 +844,38 @@ def __init__(self, headers, content): def request(self, **kw): self._called_with = kw return self._response, self._content + + +def _build_expected_url(api_base_url, project, method): + from google.cloud.datastore._http import API_VERSION + + return '/'.join([ + api_base_url, + API_VERSION, + 'projects', + project + ':' + method, + ]) + + +def _make_key_pb(project, id_=1234): + from google.cloud.datastore.key import Key + + path_args = ('Kind',) + if id_ is not None: + path_args += (id_,) + return Key(*path_args, project=project).to_protobuf() + + +def _verify_protobuf_call(testcase, called_with, uri): + from google.cloud import _http as connection_module + from google.cloud.datastore._http import _CLIENT_INFO + + testcase.assertEqual(called_with['uri'], uri) + testcase.assertEqual(called_with['method'], 'POST') + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'User-Agent': connection_module.DEFAULT_USER_AGENT, + 'Content-Length': str(len(called_with['body'])), + connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + testcase.assertEqual(called_with['headers'], expected_headers) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py 
b/packages/google-cloud-datastore/unit_tests/test_client.py index a3abaf32cae8..92f99a35aeb3 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -770,7 +770,12 @@ def test_allocate_ids_w_partial_key(self): incomplete_key._id = None creds = _make_credentials() - client = self._make_one(credentials=creds) + client = self._make_one(credentials=creds, use_gax=False) + allocated = mock.Mock( + keys=[_KeyPB(i) for i in range(num_ids)], spec=['keys']) + alloc_ids = mock.Mock(return_value=allocated, spec=[]) + ds_api = mock.Mock(allocate_ids=alloc_ids, spec=['allocate_ids']) + client._datastore_api_internal = ds_api result = client.allocate_ids(incomplete_key, num_ids) @@ -1015,8 +1020,6 @@ def __init__(self, credentials=None, http=None): self._lookup = [] self._commit_cw = [] self._commit = [] - self._alloc_cw = [] - self._alloc = [] def _add_lookup_result(self, results=(), missing=(), deferred=()): self._lookup.append((list(results), list(missing), list(deferred))) @@ -1046,12 +1049,6 @@ def commit(self, project, commit_request, transaction_id): datastore_pb2.MutationResult(key=key) for key in keys] return datastore_pb2.CommitResponse(mutation_results=mutation_results) - def allocate_ids(self, project, key_pbs): - self._alloc_cw.append((project, key_pbs)) - num_pbs = len(key_pbs) - keys = [_KeyPB(i) for i in list(range(num_pbs))] - return mock.Mock(keys=keys, spec=['keys']) - class _NoCommitBatch(object): From b0f1bc86c20d8fb81bb38133778697e6c8817d64 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 9 Mar 2017 09:41:49 -0800 Subject: [PATCH 092/611] Removing _Dummy test helper classes (in favor of mocks). --- .../unit_tests/test_client.py | 221 +++++++----------- 1 file changed, 89 insertions(+), 132 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py index 92f99a35aeb3..637668a30f6a 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -800,91 +800,74 @@ def test_key_w_project(self): client.key, KIND, ID, project=self.PROJECT) def test_key_wo_project(self): - KIND = 'KIND' - ID = 1234 + kind = 'KIND' + id_ = 1234 creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Key', new=_Dummy) - with patch: - key = client.key(KIND, ID) - - self.assertIsInstance(key, _Dummy) - self.assertEqual(key.args, (KIND, ID)) - expected_kwargs = { - 'project': self.PROJECT, - 'namespace': None, - } - self.assertEqual(key.kwargs, expected_kwargs) + 'google.cloud.datastore.client.Key', spec=['__call__']) + with patch as mock_klass: + key = client.key(kind, id_) + self.assertIs(key, mock_klass.return_value) + mock_klass.assert_called_once_with( + kind, id_, project=self.PROJECT, namespace=None) def test_key_w_namespace(self): - KIND = 'KIND' - ID = 1234 - NAMESPACE = object() + kind = 'KIND' + id_ = 1234 + namespace = object() creds = _make_credentials() - client = self._make_one(namespace=NAMESPACE, credentials=creds) + client = self._make_one(namespace=namespace, credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Key', new=_Dummy) - with patch: - key = client.key(KIND, ID) - - self.assertIsInstance(key, _Dummy) - expected_kwargs = { - 'project': self.PROJECT, - 'namespace': NAMESPACE, - } - self.assertEqual(key.kwargs, expected_kwargs) + 
'google.cloud.datastore.client.Key', spec=['__call__']) + with patch as mock_klass: + key = client.key(kind, id_) + self.assertIs(key, mock_klass.return_value) + mock_klass.assert_called_once_with( + kind, id_, project=self.PROJECT, namespace=namespace) def test_key_w_namespace_collision(self): - KIND = 'KIND' - ID = 1234 - NAMESPACE1 = object() - NAMESPACE2 = object() + kind = 'KIND' + id_ = 1234 + namespace1 = object() + namespace2 = object() creds = _make_credentials() - client = self._make_one(namespace=NAMESPACE1, credentials=creds) + client = self._make_one(namespace=namespace1, credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Key', new=_Dummy) - with patch: - key = client.key(KIND, ID, namespace=NAMESPACE2) - - self.assertIsInstance(key, _Dummy) - expected_kwargs = { - 'project': self.PROJECT, - 'namespace': NAMESPACE2, - } - self.assertEqual(key.kwargs, expected_kwargs) + 'google.cloud.datastore.client.Key', spec=['__call__']) + with patch as mock_klass: + key = client.key(kind, id_, namespace=namespace2) + self.assertIs(key, mock_klass.return_value) + mock_klass.assert_called_once_with( + kind, id_, project=self.PROJECT, namespace=namespace2) def test_batch(self): creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Batch', new=_Dummy) - with patch: + 'google.cloud.datastore.client.Batch', spec=['__call__']) + with patch as mock_klass: batch = client.batch() - - self.assertIsInstance(batch, _Dummy) - self.assertEqual(batch.args, (client,)) - self.assertEqual(batch.kwargs, {}) + self.assertIs(batch, mock_klass.return_value) + mock_klass.assert_called_once_with(client) def test_transaction_defaults(self): creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Transaction', new=_Dummy) - with patch: + 'google.cloud.datastore.client.Transaction', spec=['__call__']) + with patch as mock_klass: xact = client.transaction() - - self.assertIsInstance(xact, _Dummy) - self.assertEqual(xact.args, (client,)) - self.assertEqual(xact.kwargs, {}) + self.assertIs(xact, mock_klass.return_value) + mock_klass.assert_called_once_with(client) def test_query_w_client(self): KIND = 'KIND' @@ -909,106 +892,80 @@ def test_query_w_defaults(self): client = self._make_one(credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Query', new=_Dummy) - with patch: + 'google.cloud.datastore.client.Query', spec=['__call__']) + with patch as mock_klass: query = client.query() - - self.assertIsInstance(query, _Dummy) - self.assertEqual(query.args, (client,)) - expected_kwargs = { - 'project': self.PROJECT, - 'namespace': None, - } - self.assertEqual(query.kwargs, expected_kwargs) + self.assertIs(query, mock_klass.return_value) + mock_klass.assert_called_once_with( + client, project=self.PROJECT, namespace=None) def test_query_explicit(self): - KIND = 'KIND' - NAMESPACE = 'NAMESPACE' - ANCESTOR = object() - FILTERS = [('PROPERTY', '==', 'VALUE')] - PROJECTION = ['__key__'] - ORDER = ['PROPERTY'] - DISTINCT_ON = ['DISTINCT_ON'] + kind = 'KIND' + namespace = 'NAMESPACE' + ancestor = object() + filters = [('PROPERTY', '==', 'VALUE')] + projection = ['__key__'] + order = ['PROPERTY'] + distinct_on = ['DISTINCT_ON'] creds = _make_credentials() client = self._make_one(credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Query', new=_Dummy) - with patch: + 'google.cloud.datastore.client.Query', spec=['__call__']) + with patch as 
mock_klass: query = client.query( - kind=KIND, - namespace=NAMESPACE, - ancestor=ANCESTOR, - filters=FILTERS, - projection=PROJECTION, - order=ORDER, - distinct_on=DISTINCT_ON, + kind=kind, + namespace=namespace, + ancestor=ancestor, + filters=filters, + projection=projection, + order=order, + distinct_on=distinct_on, + ) + self.assertIs(query, mock_klass.return_value) + mock_klass.assert_called_once_with( + client, + project=self.PROJECT, + kind=kind, + namespace=namespace, + ancestor=ancestor, + filters=filters, + projection=projection, + order=order, + distinct_on=distinct_on, ) - - self.assertIsInstance(query, _Dummy) - self.assertEqual(query.args, (client,)) - kwargs = { - 'project': self.PROJECT, - 'kind': KIND, - 'namespace': NAMESPACE, - 'ancestor': ANCESTOR, - 'filters': FILTERS, - 'projection': PROJECTION, - 'order': ORDER, - 'distinct_on': DISTINCT_ON, - } - self.assertEqual(query.kwargs, kwargs) def test_query_w_namespace(self): - KIND = 'KIND' - NAMESPACE = object() + kind = 'KIND' + namespace = object() creds = _make_credentials() - client = self._make_one(namespace=NAMESPACE, credentials=creds) + client = self._make_one(namespace=namespace, credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Query', new=_Dummy) - with patch: - query = client.query(kind=KIND) - - self.assertIsInstance(query, _Dummy) - self.assertEqual(query.args, (client,)) - expected_kwargs = { - 'project': self.PROJECT, - 'namespace': NAMESPACE, - 'kind': KIND, - } - self.assertEqual(query.kwargs, expected_kwargs) + 'google.cloud.datastore.client.Query', spec=['__call__']) + with patch as mock_klass: + query = client.query(kind=kind) + self.assertIs(query, mock_klass.return_value) + mock_klass.assert_called_once_with( + client, project=self.PROJECT, namespace=namespace, kind=kind) def test_query_w_namespace_collision(self): - KIND = 'KIND' - NAMESPACE1 = object() - NAMESPACE2 = object() + kind = 'KIND' + namespace1 = object() + namespace2 = object() creds = _make_credentials() - client = self._make_one(namespace=NAMESPACE1, credentials=creds) + client = self._make_one(namespace=namespace1, credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Query', new=_Dummy) - with patch: - query = client.query(kind=KIND, namespace=NAMESPACE2) - - self.assertIsInstance(query, _Dummy) - self.assertEqual(query.args, (client,)) - expected_kwargs = { - 'project': self.PROJECT, - 'namespace': NAMESPACE2, - 'kind': KIND, - } - self.assertEqual(query.kwargs, expected_kwargs) - - -class _Dummy(object): - - def __init__(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs + 'google.cloud.datastore.client.Query', spec=['__call__']) + with patch as mock_klass: + query = client.query(kind=kind, namespace=namespace2) + self.assertIs(query, mock_klass.return_value) + mock_klass.assert_called_once_with( + client, project=self.PROJECT, namespace=namespace2, kind=kind) class _MockConnection(object): From aab58078343809d9bc7499b8b5f3add56cd93642 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 9 Mar 2017 10:19:20 -0800 Subject: [PATCH 093/611] Using GAPIC datastore object (and an HTTP equivalent) for rollback. 
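With this change the rollback RPC hangs off the datastore API object on the client (``client._datastore_api``) instead of the ``Connection``. A minimal sketch of the new contract, using ``mock`` in place of the real HTTP machinery much as the updated tests below do (the project and transaction values are placeholders):

    import mock
    from google.cloud.proto.datastore.v1 import datastore_pb2

    # Transaction.rollback now delegates to the API object; the response
    # message is empty (RollbackResponse has no fields) but is returned anyway.
    ds_api = mock.Mock(spec=['rollback'])
    ds_api.rollback.return_value = datastore_pb2.RollbackResponse()

    response = ds_api.rollback('my-project', b'xact-id')
    ds_api.rollback.assert_called_once_with('my-project', b'xact-id')
    assert response == datastore_pb2.RollbackResponse()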
--- .../google/cloud/datastore/_http.py | 57 +++++++------------ .../google/cloud/datastore/transaction.py | 2 +- .../unit_tests/test__http.py | 42 +++++++------- .../unit_tests/test_transaction.py | 49 +++++++++------- 4 files changed, 69 insertions(+), 81 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 239dfc5c8cc2..5cd8cf29b788 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -233,23 +233,6 @@ def commit(self, project, request_pb): self.connection.api_base_url, request_pb, _datastore_pb2.CommitResponse) - def rollback(self, project, request_pb): - """Perform a ``rollback`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.RollbackRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.RollbackResponse` - :returns: The returned protobuf response object. - """ - return _rpc(self.connection.http, project, 'rollback', - self.connection.api_base_url, - request_pb, _datastore_pb2.RollbackResponse) - class Connection(connection_module.Connection): """A connection to the Google Cloud Datastore via the Protobuf API. @@ -414,26 +397,6 @@ def commit(self, project, request, transaction_id): return self._datastore_api.commit(project, request) - def rollback(self, project, transaction_id): - """Rollback the connection's existing transaction. - - Maps the ``DatastoreService.Rollback`` protobuf RPC. - - :type project: str - :param project: The project to which the transaction belongs. - - :type transaction_id: str - :param transaction_id: The transaction ID returned from - :meth:`begin_transaction`. - - :rtype: :class:`.datastore_pb2.RollbackResponse` - :returns: The returned protobuf response object. - """ - request = _datastore_pb2.RollbackRequest() - request.transaction = transaction_id - # Response is empty (i.e. no fields) but we return it anyway. - return self._datastore_api.rollback(project, request) - class HTTPDatastoreAPI(object): """An API object that sends proto-over-HTTP requests. @@ -447,6 +410,26 @@ class HTTPDatastoreAPI(object): def __init__(self, client): self.client = client + def rollback(self, project, transaction_id): + """Perform a ``rollback`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type transaction_id: bytes + :param transaction_id: The transaction ID to rollback. + + :rtype: :class:`.datastore_pb2.RollbackResponse` + :returns: The returned protobuf response object. + """ + request_pb = _datastore_pb2.RollbackRequest() + request_pb.transaction = transaction_id + # Response is empty (i.e. no fields) but we return it anyway. + return _rpc(self.client._http, project, 'rollback', + self.client._base_url, + request_pb, _datastore_pb2.RollbackResponse) + def allocate_ids(self, project, key_pbs): """Perform an ``allocateIds`` request. 
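Before the transaction-side change below, a rough usage sketch of the new ``HTTPDatastoreAPI.rollback`` against a stubbed transport, in the same spirit as the ``Http`` fake used by the unit tests (``test.invalid`` is a placeholder base URL, and the ``({'status': '200'}, content)`` pair mimics an ``httplib2``-style response):

    import mock
    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.datastore._http import HTTPDatastoreAPI

    rsp_pb = datastore_pb2.RollbackResponse()
    http = mock.Mock(spec=['request'])
    http.request.return_value = ({'status': '200'}, rsp_pb.SerializeToString())
    client = mock.Mock(
        _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])

    ds_api = HTTPDatastoreAPI(client)
    response = ds_api.rollback('PROJECT', b'xact')  # POSTs a RollbackRequest
    assert response == rsp_pb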
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 40995f5efb30..651bbfcd0ba3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -201,7 +201,7 @@ def rollback(self): """ try: # No need to use the response it contains nothing. - self._client._connection.rollback(self.project, self._id) + self._client._datastore_api.rollback(self.project, self._id) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 30a32741de81..c9ee3f4eb1b4 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -726,6 +726,23 @@ def test_commit_w_transaction(self): self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.TRANSACTIONAL) + +class TestHTTPDatastoreAPI(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.datastore._http import HTTPDatastoreAPI + + return HTTPDatastoreAPI + + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) + + def test_constructor(self): + client = object() + ds_api = self._make_one(client) + self.assertIs(ds_api.client, client) + def test_rollback_ok(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -739,35 +756,18 @@ def test_rollback_ok(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.rollback(project, transaction) + ds_api = self._make_one(client) + response = ds_api.rollback(project, transaction) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'rollback') + uri = _build_expected_url(client._base_url, project, 'rollback') cw = http._called_with _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RollbackRequest() request.ParseFromString(cw['body']) self.assertEqual(request.transaction, transaction) - -class TestHTTPDatastoreAPI(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import HTTPDatastoreAPI - - return HTTPDatastoreAPI - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - client = object() - ds_api = self._make_one(client) - self.assertIs(ds_api.client, client) - def test_allocate_ids_empty(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -784,8 +784,8 @@ def test_allocate_ids_empty(self): response = ds_api.allocate_ids(project, []) # Check the result and verify the callers. 
- self.assertEqual(list(response.keys), []) self.assertEqual(response, rsp_pb) + self.assertEqual(list(response.keys), []) uri = _build_expected_url(client._base_url, project, 'allocateIds') cw = http._called_with _verify_protobuf_call(self, cw, uri) diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py index db7bf6bcc97f..c122fb38b11f 100644 --- a/packages/google-cloud-datastore/unit_tests/test_transaction.py +++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py @@ -14,6 +14,8 @@ import unittest +import mock + class TestTransaction(unittest.TestCase): @@ -77,15 +79,18 @@ def test_begin(self): self.assertEqual(connection._begun, _PROJECT) def test_begin_tombstoned(self): - _PROJECT = 'PROJECT' - connection = _Connection(234) - client = _Client(_PROJECT, connection) + project = 'PROJECT' + id_ = 234 + connection = _Connection(id_) + ds_api = mock.Mock(spec=['rollback']) + client = _Client(project, connection, datastore_api=ds_api) xact = self._make_one(client) xact.begin() - self.assertEqual(xact.id, 234) - self.assertEqual(connection._begun, _PROJECT) + self.assertEqual(xact.id, id_) + self.assertEqual(connection._begun, project) xact.rollback() + ds_api.rollback.assert_called_once_with(project, id_) self.assertIsNone(xact.id) self.assertRaises(ValueError, xact.begin) @@ -104,14 +109,16 @@ def test_begin_w_begin_transaction_failure(self): self.assertEqual(connection._begun, _PROJECT) def test_rollback(self): - _PROJECT = 'PROJECT' - connection = _Connection(234) - client = _Client(_PROJECT, connection) + project = 'PROJECT' + id_ = 234 + connection = _Connection(id_) + ds_api = mock.Mock(spec=['rollback']) + client = _Client(project, connection, datastore_api=ds_api) xact = self._make_one(client) xact.begin() xact.rollback() + ds_api.rollback.assert_called_once_with(project, id_) self.assertIsNone(xact.id) - self.assertEqual(connection._rolled_back, (_PROJECT, 234)) def test_commit_no_partial_keys(self): _PROJECT = 'PROJECT' @@ -161,19 +168,21 @@ def test_context_manager_w_raise(self): class Foo(Exception): pass - _PROJECT = 'PROJECT' - connection = _Connection(234) - client = _Client(_PROJECT, connection) + project = 'PROJECT' + id_ = 234 + connection = _Connection(id_) + ds_api = mock.Mock(spec=['rollback']) + client = _Client(project, connection, datastore_api=ds_api) xact = self._make_one(client) xact._mutation = object() try: with xact: - self.assertEqual(xact.id, 234) - self.assertEqual(connection._begun, _PROJECT) + self.assertEqual(xact.id, id_) + self.assertEqual(connection._begun, project) raise Foo() except Foo: self.assertIsNone(xact.id) - self.assertEqual(connection._rolled_back, (_PROJECT, 234)) + ds_api.rollback.assert_called_once_with(project, id_) self.assertIsNone(connection._committed) self.assertIsNone(xact.id) @@ -192,7 +201,6 @@ def _make_key(kind, id_, project): class _Connection(object): _marker = object() _begun = None - _rolled_back = None _committed = None _side_effect = None @@ -206,8 +214,6 @@ def __init__(self, xact_id=123, keys=()): mutation_results=mutation_results) def begin_transaction(self, project): - import mock - self._begun = project if self._side_effect is None: return mock.Mock( @@ -215,9 +221,6 @@ def begin_transaction(self, project): else: raise self._side_effect - def rollback(self, project, transaction_id): - self._rolled_back = project, transaction_id - def commit(self, project, commit_request, transaction_id): self._committed = (project, 
commit_request, transaction_id) return self._commit_response_pb @@ -234,9 +237,11 @@ def __init__(self): class _Client(object): - def __init__(self, project, connection, namespace=None): + def __init__(self, project, connection, + datastore_api=None, namespace=None): self.project = project self._connection = connection + self._datastore_api = datastore_api self.namespace = namespace self._batches = [] From 3d5d5c556545a5ceda55035f0d44568299298953 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 9 Mar 2017 11:56:08 -0800 Subject: [PATCH 094/611] Using GAPIC datastore object (and an HTTP equivalent) for commit. This is a bit more involved since - The signature requires the **caller** to determine the mode - The signature receives a list of mutations, so we no longer collect mutations within a CommitRequest object - The exception re-mapping must be done, so we can't use the base datastore_client.DatastoreClient GAPIC object --- .../google/cloud/datastore/_gax.py | 69 +++++++--------- .../google/cloud/datastore/_http.py | 81 ++++++++----------- .../google/cloud/datastore/batch.py | 23 ++++-- 3 files changed, 78 insertions(+), 95 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index 155a02a3fe7e..c88614f19bf2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -19,6 +19,8 @@ from google.cloud.gapic.datastore.v1 import datastore_client from google.cloud.proto.datastore.v1 import datastore_pb2_grpc +from google.gax.errors import GaxError +from google.gax.grpc import exc_to_code from google.gax.utils import metrics from grpc import StatusCode import six @@ -71,6 +73,13 @@ def _grpc_catch_rendezvous(): """ try: yield + except GaxError as exc: + error_code = exc_to_code(exc.cause) + error_class = _GRPC_ERROR_MAPPING.get(error_code) + if error_class is None: + raise + else: + raise error_class(exc.cause.details()) except exceptions.GrpcRendezvous as exc: error_code = exc.code() error_class = _GRPC_ERROR_MAPPING.get(error_code) @@ -158,56 +167,38 @@ def begin_transaction(self, project, request_pb): with _grpc_catch_rendezvous(): return self._stub.BeginTransaction(request_pb) - def commit(self, project, request_pb): - """Perform a ``commit`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.CommitRequest` - :param request_pb: The request protobuf object. - :rtype: :class:`.datastore_pb2.CommitResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.Commit(request_pb) +class GAPICDatastoreAPI(datastore_client.DatastoreClient): + """An API object that sends proto-over-gRPC requests. - def rollback(self, project, request_pb): - """Perform a ``rollback`` request. + A light wrapper around the parent class, with exception re-mapping + provided (from GaxError to our native errors). - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type args: tuple + :param args: Positional arguments to pass to constructor. - :type request_pb: :class:`.datastore_pb2.RollbackRequest` - :param request_pb: The request protobuf object. + :type kwargs: dict + :param kwargs: Keyword arguments to pass to constructor. 
+ """ - :rtype: :class:`.datastore_pb2.RollbackResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.Rollback(request_pb) + def commit(self, *args, **kwargs): + """Perform a ``commit`` request. - def allocate_ids(self, project, request_pb): - """Perform an ``allocateIds`` request. + A light wrapper around the the base method from the parent class. + Intended to provide exception re-mapping (from GaxError to our + native errors). - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type args: tuple + :param args: Positional arguments to pass to base method. - :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` - :param request_pb: The request protobuf object. + :type kwargs: dict + :param kwargs: Keyword arguments to pass to base method. - :rtype: :class:`.datastore_pb2.AllocateIdsResponse` + :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ - request_pb.project_id = project with _grpc_catch_rendezvous(): - return self._stub.AllocateIds(request_pb) + return super(GAPICDatastoreAPI, self).commit(*args, **kwargs) def make_datastore_api(client): @@ -222,5 +213,5 @@ def make_datastore_api(client): channel = make_secure_channel( client._credentials, DEFAULT_USER_AGENT, datastore_client.DatastoreClient.SERVICE_ADDRESS) - return datastore_client.DatastoreClient( + return GAPICDatastoreAPI( channel=channel, lib_name='gccl', lib_version=__version__) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 5cd8cf29b788..c3c1f0bcda36 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -216,23 +216,6 @@ def begin_transaction(self, project, request_pb): self.connection.api_base_url, request_pb, _datastore_pb2.BeginTransactionResponse) - def commit(self, project, request_pb): - """Perform a ``commit`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.CommitRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.CommitResponse` - :returns: The returned protobuf response object. - """ - return _rpc(self.connection.http, project, 'commit', - self.connection.api_base_url, - request_pb, _datastore_pb2.CommitResponse) - class Connection(connection_module.Connection): """A connection to the Google Cloud Datastore via the Protobuf API. @@ -366,37 +349,6 @@ def begin_transaction(self, project): request = _datastore_pb2.BeginTransactionRequest() return self._datastore_api.begin_transaction(project, request) - def commit(self, project, request, transaction_id): - """Commit mutations in context of current transaction (if any). - - Maps the ``DatastoreService.Commit`` protobuf RPC. - - :type project: str - :param project: The project to which the transaction applies. - - :type request: :class:`.datastore_pb2.CommitRequest` - :param request: The protobuf with the mutations being committed. - - :type transaction_id: str - :param transaction_id: (Optional) The transaction ID returned from - :meth:`begin_transaction`. Non-transactional - batches must pass ``None``. - - .. note:: - - This method will mutate ``request`` before using it. 
- - :rtype: :class:`.datastore_pb2.CommitResponse` - :returns: The protobuf response from a commit request. - """ - if transaction_id: - request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL - request.transaction = transaction_id - else: - request.mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL - - return self._datastore_api.commit(project, request) - class HTTPDatastoreAPI(object): """An API object that sends proto-over-HTTP requests. @@ -410,6 +362,39 @@ class HTTPDatastoreAPI(object): def __init__(self, client): self.client = client + def commit(self, project, mode, mutations, transaction=None): + """Perform a ``commit`` request. + + :type project: str + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type mode: :class:`.gapic.datastore.v1.enums.CommitRequest.Mode` + :param mode: The type of commit to perform. Expected to be one of + ``TRANSACTIONAL`` or ``NON_TRANSACTIONAL``. + + :type mutations: list + :param mutations: List of :class:`.datastore_pb2.Mutation`, the + mutations to perform. + + :type transaction: bytes + :param transaction: (Optional) The transaction ID returned from + :meth:`begin_transaction`. Non-transactional + commits must pass :data:`None`. + + :rtype: :class:`.datastore_pb2.CommitResponse` + :returns: The returned protobuf response object. + """ + request_pb = _datastore_pb2.CommitRequest( + project_id=project, + mode=mode, + transaction=transaction, + mutations=mutations, + ) + return _rpc(self.client._http, project, 'commit', + self.client._base_url, + request_pb, _datastore_pb2.CommitResponse) + def rollback(self, project, transaction_id): """Perform a ``rollback`` request. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 33cc536cf273..a5b80e432c9a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -77,7 +77,7 @@ class Batch(object): def __init__(self, client): self._client = client - self._commit_request = _datastore_pb2.CommitRequest() + self._mutations = [] self._partial_key_entities = [] self._status = self._INITIAL @@ -110,7 +110,8 @@ def _add_partial_key_entity_pb(self): :returns: The newly created entity protobuf that will be updated and sent with a commit. """ - new_mutation = self.mutations.add() + new_mutation = _datastore_pb2.Mutation() + self._mutations.append(new_mutation) return new_mutation.insert def _add_complete_key_entity_pb(self): @@ -123,7 +124,8 @@ def _add_complete_key_entity_pb(self): # We use ``upsert`` for entities with completed keys, rather than # ``insert`` or ``update``, in order not to create race conditions # based on prior existence / removal of the entity. - new_mutation = self.mutations.add() + new_mutation = _datastore_pb2.Mutation() + self._mutations.append(new_mutation) return new_mutation.upsert def _add_delete_key_pb(self): @@ -133,7 +135,8 @@ def _add_delete_key_pb(self): :returns: The newly created key protobuf that will be deleted when sent with a commit. """ - new_mutation = self.mutations.add() + new_mutation = _datastore_pb2.Mutation() + self._mutations.append(new_mutation) return new_mutation.delete @property @@ -150,7 +153,7 @@ def mutations(self): :returns: The list of :class:`.datastore_pb2.Mutation` protobufs to be sent in the commit request. 
""" - return self._commit_request.mutations + return self._mutations def put(self, entity): """Remember an entity's state to be saved during :meth:`commit`. @@ -237,9 +240,13 @@ def _commit(self): This is called by :meth:`commit`. """ - # NOTE: ``self._commit_request`` will be modified. - commit_response_pb = self._client._connection.commit( - self.project, self._commit_request, self._id) + if self._id is None: + mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL + else: + mode = _datastore_pb2.CommitRequest.TRANSACTIONAL + + commit_response_pb = self._client._datastore_api.commit( + self.project, mode, self._mutations, transaction=self._id) _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that # :meth:`Connection.commit` will return keys that match (length and From ff6a7c0ea53909e144a0ef5d8ca63e4e463f6a79 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 9 Mar 2017 14:29:40 -0800 Subject: [PATCH 095/611] Updating unit tests for GAPIC commit in datastore. --- .../unit_tests/test__gax.py | 128 +++++----- .../unit_tests/test__http.py | 66 ++--- .../unit_tests/test_batch.py | 237 +++++++++--------- .../unit_tests/test_client.py | 80 +++--- .../unit_tests/test_transaction.py | 146 ++++++----- 5 files changed, 360 insertions(+), 297 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index b6882f11f3b7..ffc1bad756fd 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -34,6 +34,14 @@ def _fake_method(exc, result=None): else: raise exc + @staticmethod + def _make_rendezvous(status_code, details): + from grpc._channel import _RPCState + from google.cloud.exceptions import GrpcRendezvous + + exc_state = _RPCState((), None, None, status_code, details) + return GrpcRendezvous(exc_state, None, None, None) + def test_success(self): expected = object() with self._call_fut(): @@ -42,39 +50,30 @@ def test_success(self): def test_failure_aborted(self): from grpc import StatusCode - from grpc._channel import _RPCState from google.cloud.exceptions import Conflict - from google.cloud.exceptions import GrpcRendezvous details = 'Bad things.' 
- exc_state = _RPCState((), None, None, StatusCode.ABORTED, details) - exc = GrpcRendezvous(exc_state, None, None, None) + exc = self._make_rendezvous(StatusCode.ABORTED, details) with self.assertRaises(Conflict): with self._call_fut(): self._fake_method(exc) def test_failure_invalid_argument(self): from grpc import StatusCode - from grpc._channel import _RPCState from google.cloud.exceptions import BadRequest - from google.cloud.exceptions import GrpcRendezvous details = ('Cannot have inequality filters on multiple ' 'properties: [created, priority]') - exc_state = _RPCState((), None, None, - StatusCode.INVALID_ARGUMENT, details) - exc = GrpcRendezvous(exc_state, None, None, None) + exc = self._make_rendezvous(StatusCode.INVALID_ARGUMENT, details) with self.assertRaises(BadRequest): with self._call_fut(): self._fake_method(exc) def test_failure_cancelled(self): - from grpc import StatusCode - from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous + from grpc import StatusCode - exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None) - exc = GrpcRendezvous(exc_state, None, None, None) + exc = self._make_rendezvous(StatusCode.CANCELLED, None) with self.assertRaises(GrpcRendezvous): with self._call_fut(): self._fake_method(exc) @@ -85,6 +84,33 @@ def test_commit_failure_non_grpc_err(self): with self._call_fut(): self._fake_method(exc) + def test_gax_error(self): + from google.gax.errors import GaxError + from grpc import StatusCode + from google.cloud.exceptions import Forbidden + + # First, create low-level GrpcRendezvous exception. + details = 'Some error details.' + cause = self._make_rendezvous(StatusCode.PERMISSION_DENIED, details) + # Then put it into a high-level GaxError. + msg = 'GAX Error content.' + exc = GaxError(msg, cause=cause) + + with self.assertRaises(Forbidden): + with self._call_fut(): + self._fake_method(exc) + + def test_gax_error_not_mapped(self): + from google.gax.errors import GaxError + from grpc import StatusCode + + cause = self._make_rendezvous(StatusCode.CANCELLED, None) + exc = GaxError(None, cause=cause) + + with self.assertRaises(GaxError): + with self._call_fut(): + self._fake_method(exc) + class Test_DatastoreAPIOverGRPC(unittest.TestCase): @@ -228,45 +254,38 @@ def test_begin_transaction(self): stub.method_calls, [(request_pb, 'BeginTransaction')]) - def test_commit_success(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api, _ = self._make_one(stub=stub) - request_pb = mock.Mock(project_id=None, spec=['project_id']) - project = 'PROJECT' - result = datastore_api.commit(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Commit')]) +@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') +class TestGAPICDatastoreAPI(unittest.TestCase): - def test_rollback(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api, _ = self._make_one(stub=stub) + @staticmethod + def _get_target_class(): + from google.cloud.datastore._gax import GAPICDatastoreAPI - request_pb = mock.Mock(project_id=None, spec=['project_id']) - project = 'PROJECT' - result = datastore_api.rollback(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Rollback')]) + return GAPICDatastoreAPI - def test_allocate_ids(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api, _ = 
self._make_one(stub=stub) + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) - request_pb = mock.Mock(project_id=None, spec=['project_id']) - project = 'PROJECT' - result = datastore_api.allocate_ids(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual( - stub.method_calls, - [(request_pb, 'AllocateIds')]) + def test_commit(self): + from google.cloud.gapic.datastore.v1 import datastore_client + + patch1 = mock.patch.object( + datastore_client.DatastoreClient, '__init__', + return_value=None) + patch2 = mock.patch.object(datastore_client.DatastoreClient, 'commit') + patch3 = mock.patch( + 'google.cloud.datastore._gax._grpc_catch_rendezvous') + + with patch1 as mock_constructor: + ds_api = self._make_one() + mock_constructor.assert_called_once_with() + with patch2 as mock_commit: + with patch3 as mock_catch_rendezvous: + mock_catch_rendezvous.assert_not_called() + ds_api.commit(1, 2, a=3) + mock_commit.assert_called_once_with(1, 2, a=3) + mock_catch_rendezvous.assert_called_once_with() @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') @@ -278,12 +297,12 @@ def _call_fut(self, client): return make_datastore_api(client) @mock.patch( - 'google.cloud.gapic.datastore.v1.datastore_client.DatastoreClient', - SERVICE_ADDRESS='datastore.mock.mock', + 'google.cloud.datastore._gax.GAPICDatastoreAPI', return_value=mock.sentinel.ds_client) @mock.patch('google.cloud.datastore._gax.make_secure_channel', return_value=mock.sentinel.channel) def test_it(self, make_chan, mock_klass): + from google.cloud.gapic.datastore.v1 import datastore_client from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.datastore import __version__ @@ -294,7 +313,7 @@ def test_it(self, make_chan, mock_klass): make_chan.assert_called_once_with( mock.sentinel.credentials, DEFAULT_USER_AGENT, - mock_klass.SERVICE_ADDRESS) + datastore_client.DatastoreClient.SERVICE_ADDRESS) mock_klass.assert_called_once_with( channel=mock.sentinel.channel, lib_name='gccl', lib_version=__version__) @@ -322,12 +341,3 @@ def RunQuery(self, request_pb): def BeginTransaction(self, request_pb): return self._method(request_pb, 'BeginTransaction') - - def Commit(self, request_pb): - return self._method(request_pb, 'Commit') - - def Rollback(self, request_pb): - return self._method(request_pb, 'Rollback') - - def AllocateIds(self, request_pb): - return self._method(request_pb, 'AllocateIds') diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index c9ee3f4eb1b4..482120c49969 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -662,6 +662,23 @@ def test_begin_transaction(self): # The RPC-over-HTTP request does not set the project in the request. 
self.assertEqual(request.project_id, u'') + +class TestHTTPDatastoreAPI(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.datastore._http import HTTPDatastoreAPI + + return HTTPDatastoreAPI + + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) + + def test_constructor(self): + client = object() + ds_api = self._make_one(client) + self.assertIs(ds_api.client, client) + def test_commit_wo_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb @@ -675,19 +692,23 @@ def test_commit_wo_transaction(self): insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) - conn = self._make_one(client) - uri = _build_expected_url(conn.api_base_url, project, 'commit') - result = conn.commit(project, req_pb, None) - self.assertEqual(result, rsp_pb) + # Make request. + rq_class = datastore_pb2.CommitRequest + ds_api = self._make_one(client) + mode = rq_class.NON_TRANSACTIONAL + result = ds_api.commit(project, mode, [mutation]) - # Verify the caller. + # Check the result and verify the callers. + self.assertEqual(result, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'commit') cw = http._called_with _verify_protobuf_call(self, cw, uri) - rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.transaction, b'') @@ -707,42 +728,29 @@ def test_commit_w_transaction(self): insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' + + # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) - conn = self._make_one(client) - uri = _build_expected_url(conn.api_base_url, project, 'commit') - result = conn.commit(project, req_pb, b'xact') - self.assertEqual(result, rsp_pb) + # Make request. + rq_class = datastore_pb2.CommitRequest + ds_api = self._make_one(client) + mode = rq_class.TRANSACTIONAL + result = ds_api.commit(project, mode, [mutation], transaction=b'xact') - # Verify the caller. + # Check the result and verify the callers. 
+        self.assertEqual(result, rsp_pb)
+        uri = _build_expected_url(client._base_url, project, 'commit')
         cw = http._called_with
         _verify_protobuf_call(self, cw, uri)
-
         rq_class = datastore_pb2.CommitRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
         self.assertEqual(request.transaction, b'xact')
         self.assertEqual(list(request.mutations), [mutation])
         self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
-
-class TestHTTPDatastoreAPI(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore._http import HTTPDatastoreAPI
-
-        return HTTPDatastoreAPI
-
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
-
-    def test_constructor(self):
-        client = object()
-        ds_api = self._make_one(client)
-        self.assertIs(ds_api.client, client)
-
     def test_rollback_ok(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/unit_tests/test_batch.py
index db62d0436c64..df01a0ce2c7a 100644
--- a/packages/google-cloud-datastore/unit_tests/test_batch.py
+++ b/packages/google-cloud-datastore/unit_tests/test_batch.py
@@ -14,6 +14,8 @@
 
 import unittest
 
+import mock
+
 
 class TestBatch(unittest.TestCase):
 
@@ -27,28 +29,24 @@ def _make_one(self, client):
         return self._get_target_class()(client)
 
     def test_ctor(self):
-        from google.cloud.proto.datastore.v1 import datastore_pb2
-
-        _PROJECT = 'PROJECT'
-        _NAMESPACE = 'NAMESPACE'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection, _NAMESPACE)
+        project = 'PROJECT'
+        namespace = 'NAMESPACE'
+        client = _Client(project, namespace=namespace)
         batch = self._make_one(client)
-        self.assertEqual(batch.project, _PROJECT)
+        self.assertEqual(batch.project, project)
         self.assertIs(batch._client, client)
-        self.assertEqual(batch.namespace, _NAMESPACE)
+        self.assertEqual(batch.namespace, namespace)
         self.assertIsNone(batch._id)
         self.assertEqual(batch._status, batch._INITIAL)
-        self.assertIsInstance(batch._commit_request,
-                              datastore_pb2.CommitRequest)
-        self.assertIs(batch.mutations, batch._commit_request.mutations)
+        self.assertEqual(batch._mutations, [])
         self.assertEqual(batch._partial_key_entities, [])
 
     def test_current(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        client = _Client(project)
         batch1 = self._make_one(client)
         batch2 = self._make_one(client)
         self.assertIsNone(batch1.current())
@@ -64,19 +62,22 @@ def test_current(self):
         self.assertIsNone(batch1.current())
         self.assertIsNone(batch2.current())
 
+        commit_method = client._datastore_api.commit
+        self.assertEqual(commit_method.call_count, 2)
+        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+        commit_method.assert_called_with(project, mode, [], transaction=None)
+
     def test_put_entity_wo_key(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
 
         batch.begin()
         self.assertRaises(ValueError, batch.put, _Entity())
 
     def test_put_entity_wrong_status(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
         entity = _Entity()
         entity.key = _Key('OTHER')
@@ -85,9 +86,8 @@ def test_put_entity_wrong_status(self):
         self.assertRaises(ValueError, batch.put, entity)
 
     def test_put_entity_w_key_wrong_project(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
         entity = _Entity()
         entity.key = _Key('OTHER')
@@ -96,13 +96,12 @@ def test_put_entity_w_key_wrong_project(self):
         self.assertRaises(ValueError, batch.put, entity)
 
     def test_put_entity_w_partial_key(self):
-        _PROJECT = 'PROJECT'
-        _PROPERTIES = {'foo': 'bar'}
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        properties = {'foo': 'bar'}
+        client = _Client(project)
         batch = self._make_one(client)
-        entity = _Entity(_PROPERTIES)
-        key = entity.key = _Key(_PROJECT)
+        entity = _Entity(properties)
+        key = entity.key = _Key(project)
         key._id = None
 
         batch.begin()
@@ -115,19 +114,18 @@ def test_put_entity_w_completed_key(self):
         from google.cloud.datastore.helpers import _property_tuples
 
-        _PROJECT = 'PROJECT'
-        _PROPERTIES = {
+        project = 'PROJECT'
+        properties = {
             'foo': 'bar',
             'baz': 'qux',
             'spam': [1, 2, 3],
             'frotz': [],  # will be ignored
         }
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        client = _Client(project)
         batch = self._make_one(client)
-        entity = _Entity(_PROPERTIES)
+        entity = _Entity(properties)
         entity.exclude_from_indexes = ('baz', 'spam')
-        key = entity.key = _Key(_PROJECT)
+        key = entity.key = _Key(project)
 
         batch.begin()
         batch.put(entity)
@@ -147,31 +145,28 @@ def test_put_entity_w_completed_key(self):
         self.assertFalse('frotz' in prop_dict)
 
     def test_delete_wrong_status(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
-        key = _Key(_PROJECT)
+        key = _Key(project)
         key._id = None
 
         self.assertEqual(batch._status, batch._INITIAL)
         self.assertRaises(ValueError, batch.delete, key)
 
     def test_delete_w_partial_key(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
-        key = _Key(_PROJECT)
+        key = _Key(project)
         key._id = None
 
         batch.begin()
         self.assertRaises(ValueError, batch.delete, key)
 
     def test_delete_w_key_wrong_project(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
         key = _Key('OTHER')
 
@@ -179,11 +174,10 @@ def test_delete_w_key_wrong_project(self):
         self.assertRaises(ValueError, batch.delete, key)
 
     def test_delete_w_completed_key(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
-        key = _Key(_PROJECT)
+        key = _Key(project)
 
         batch.begin()
         batch.delete(key)
@@ -192,24 +186,24 @@ def test_delete_w_completed_key(self):
         self.assertEqual(mutated_key, key._key)
 
     def test_begin(self):
-        _PROJECT = 'PROJECT'
-        client = _Client(_PROJECT, None)
+        project = 'PROJECT'
+        client = _Client(project, None)
         batch = self._make_one(client)
         self.assertEqual(batch._status, batch._INITIAL)
         batch.begin()
         self.assertEqual(batch._status, batch._IN_PROGRESS)
 
     def test_begin_fail(self):
-        _PROJECT = 'PROJECT'
-        client = _Client(_PROJECT, None)
+        project = 'PROJECT'
+        client = _Client(project, None)
         batch = self._make_one(client)
         batch._status = batch._IN_PROGRESS
         with self.assertRaises(ValueError):
             batch.begin()
 
     def test_rollback(self):
-        _PROJECT = 'PROJECT'
-        client = _Client(_PROJECT, None)
+        project = 'PROJECT'
+        client = _Client(project, None)
         batch = self._make_one(client)
         batch.begin()
         self.assertEqual(batch._status, batch._IN_PROGRESS)
@@ -217,17 +211,18 @@ def test_rollback(self):
         self.assertEqual(batch._status, batch._ABORTED)
 
     def test_rollback_wrong_status(self):
-        _PROJECT = 'PROJECT'
-        client = _Client(_PROJECT, None)
+        project = 'PROJECT'
+        client = _Client(project, None)
         batch = self._make_one(client)
 
         self.assertEqual(batch._status, batch._INITIAL)
         self.assertRaises(ValueError, batch.rollback)
 
     def test_commit(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
 
         self.assertEqual(batch._status, batch._INITIAL)
@@ -236,23 +231,25 @@ def test_commit(self):
         batch.commit()
         self.assertEqual(batch._status, batch._FINISHED)
 
-        self.assertEqual(connection._committed,
-                         [(_PROJECT, batch._commit_request, None)])
+        commit_method = client._datastore_api.commit
+        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+        commit_method.assert_called_with(project, mode, [], transaction=None)
 
     def test_commit_wrong_status(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        project = 'PROJECT'
+        client = _Client(project)
         batch = self._make_one(client)
 
         self.assertEqual(batch._status, batch._INITIAL)
         self.assertRaises(ValueError, batch.commit)
 
     def test_commit_w_partial_key_entities(self):
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
         project = 'PROJECT'
         new_id = 1234
-        connection = _Connection(new_id)
-        client = _Client(project, connection)
+        ds_api = _make_datastore_api(new_id)
+        client = _Client(project, datastore_api=ds_api)
         batch = self._make_one(client)
         entity = _Entity({})
         key = entity.key = _Key(project)
@@ -265,19 +262,21 @@ def test_commit_w_partial_key_entities(self):
         batch.commit()
         self.assertEqual(batch._status, batch._FINISHED)
 
-        self.assertEqual(connection._committed,
-                         [(project, batch._commit_request, None)])
+        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+        ds_api.commit.assert_called_once_with(
+            project, mode, [], transaction=None)
         self.assertFalse(entity.key.is_partial)
         self.assertEqual(entity.key._id, new_id)
 
     def test_as_context_mgr_wo_error(self):
-        _PROJECT = 'PROJECT'
-        _PROPERTIES = {'foo': 'bar'}
-        connection = _Connection()
-        entity = _Entity(_PROPERTIES)
-        key = entity.key = _Key(_PROJECT)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        properties = {'foo': 'bar'}
+        entity = _Entity(properties)
+        key = entity.key = _Key(project)
 
-        client = _Client(_PROJECT, connection)
+        client = _Client(project)
         self.assertEqual(list(client._batches), [])
 
         with self._make_one(client) as batch:
@@ -288,19 +287,22 @@ def test_as_context_mgr_wo_error(self):
         mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
         self.assertEqual(mutated_entity.key, key._key)
 
-        self.assertEqual(connection._committed,
-                         [(_PROJECT, batch._commit_request, None)])
+        commit_method = client._datastore_api.commit
+        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+        commit_method.assert_called_with(
+            project, mode, batch.mutations, transaction=None)
 
     def test_as_context_mgr_nested(self):
-        _PROJECT = 'PROJECT'
-        _PROPERTIES = {'foo': 'bar'}
-        connection = _Connection()
-        entity1 = _Entity(_PROPERTIES)
-        key1 = entity1.key = _Key(_PROJECT)
-        entity2 = _Entity(_PROPERTIES)
-        key2 = entity2.key = _Key(_PROJECT)
-
-        client = _Client(_PROJECT, connection)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        properties = {'foo': 'bar'}
+        entity1 = _Entity(properties)
+        key1 = entity1.key = _Key(project)
+        entity2 = _Entity(properties)
+        key2 = entity2.key = _Key(project)
+
+        client = _Client(project)
         self.assertEqual(list(client._batches), [])
 
         with self._make_one(client) as batch1:
@@ -320,18 +322,21 @@ def test_as_context_mgr_nested(self):
         mutated_entity2 = _mutated_pb(self, batch2.mutations, 'upsert')
         self.assertEqual(mutated_entity2.key, key2._key)
 
-        self.assertEqual(connection._committed,
-                         [(_PROJECT, batch2._commit_request, None),
-                          (_PROJECT, batch1._commit_request, None)])
+        commit_method = client._datastore_api.commit
+        self.assertEqual(commit_method.call_count, 2)
+        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+        commit_method.assert_called_with(
+            project, mode, batch1.mutations, transaction=None)
+        commit_method.assert_called_with(
+            project, mode, batch2.mutations, transaction=None)
 
     def test_as_context_mgr_w_error(self):
-        _PROJECT = 'PROJECT'
-        _PROPERTIES = {'foo': 'bar'}
-        connection = _Connection()
-        entity = _Entity(_PROPERTIES)
-        key = entity.key = _Key(_PROJECT)
+        project = 'PROJECT'
+        properties = {'foo': 'bar'}
+        entity = _Entity(properties)
+        key = entity.key = _Key(project)
 
-        client = _Client(_PROJECT, connection)
+        client = _Client(project)
        self.assertEqual(list(client._batches), [])
 
         try:
@@ -346,7 +351,6 @@ def test_as_context_mgr_w_error(self):
         mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
         self.assertEqual(mutated_entity.key, key._key)
 
-        self.assertEqual(connection._committed, [])
 
     def test_as_context_mgr_enter_fails(self):
         klass = self._get_target_class()
@@ -409,24 +413,6 @@ def test_it(self):
 
         self.assertEqual(result, (index_updates, keys))
 
 
-class _Connection(object):
-    _marker = object()
-    _save_result = (False, None)
-
-    def __init__(self, *new_key_ids):
-        from google.cloud.proto.datastore.v1 import datastore_pb2
-
-        self._committed = []
-        mutation_results = [
-            _make_mutation(key_id) for key_id in new_key_ids]
-        self._commit_response_pb = datastore_pb2.CommitResponse(
-            mutation_results=mutation_results)
-
-    def commit(self, project, commit_request, transaction_id):
-        self._committed.append((project, commit_request, transaction_id))
-        return self._commit_response_pb
-
-
 class _Entity(dict):
     key = None
     exclude_from_indexes = ()
@@ -434,7 +420,6 @@ class _Entity(dict):
 
 
 class _Key(object):
-    _MARKER = object()
     _kind = 'KIND'
     _key = 'KEY'
     _path = None
@@ -471,9 +456,11 @@ def completed_key(self, new_id):
 
 class _Client(object):
 
-    def __init__(self, project, connection, namespace=None):
+    def __init__(self, project, datastore_api=None, namespace=None):
         self.project = project
-        self._connection = connection
+        if datastore_api is None:
+            datastore_api = _make_datastore_api()
+        self._datastore_api = datastore_api
         self.namespace = namespace
         self._batches = []
@@ -516,3 +503,17 @@ def _make_mutation(id_):
     elem.kind = 'Kind'
     elem.id = id_
     return datastore_pb2.MutationResult(key=key)
+
+
+def _make_commit_response(*new_key_ids):
+    from google.cloud.proto.datastore.v1 import datastore_pb2
+
+    mutation_results = [
+        _make_mutation(key_id) for key_id in new_key_ids]
+    return datastore_pb2.CommitResponse(mutation_results=mutation_results)
+
+
+def _make_datastore_api(*new_key_ids):
+    commit_method = mock.Mock(
+        return_value=_make_commit_response(*new_key_ids), spec=[])
+    return mock.Mock(commit=commit_method, spec=['commit'])
diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/unit_tests/test_client.py
index 637668a30f6a..fdc7394af419 100644
--- a/packages/google-cloud-datastore/unit_tests/test_client.py
+++ b/packages/google-cloud-datastore/unit_tests/test_client.py
@@ -645,6 +645,7 @@ def test_put_multi_w_single_empty_entity(self):
         self.assertRaises(ValueError, client.put_multi, Entity())
 
     def test_put_multi_no_batch_w_partial_key(self):
+        from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud.datastore.helpers import _property_tuples
 
         entity = _Entity(foo=u'bar')
@@ -654,17 +655,23 @@ def test_put_multi_no_batch_w_partial_key(self):
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
         key_pb = _make_key(234)
-        client._connection._commit.append([key_pb])
+        ds_api = _make_datastore_api(key_pb)
+        client._datastore_api_internal = ds_api
 
         result = client.put_multi([entity])
         self.assertIsNone(result)
 
-        self.assertEqual(len(client._connection._commit_cw), 1)
-        (project,
-         commit_req, transaction_id) = client._connection._commit_cw[0]
-        self.assertEqual(project, self.PROJECT)
+        self.assertEqual(ds_api.commit.call_count, 1)
+        _, positional, keyword = ds_api.commit.mock_calls[0]
+        self.assertEqual(keyword, {'transaction': None})
+
+        self.assertEqual(len(positional), 3)
+        self.assertEqual(positional[0], self.PROJECT)
+        self.assertEqual(
+            positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL)
 
-        mutated_entity = _mutated_pb(self, commit_req.mutations, 'insert')
+        mutations = positional[2]
+        mutated_entity = _mutated_pb(self, mutations, 'insert')
         self.assertEqual(mutated_entity.key, key.to_protobuf())
 
         prop_list = list(_property_tuples(mutated_entity))
@@ -673,8 +680,6 @@ def test_put_multi_no_batch_w_partial_key(self):
         self.assertEqual(name, 'foo')
         self.assertEqual(value_pb.string_value, u'bar')
 
-        self.assertIsNone(transaction_id)
-
     def test_put_multi_existing_batch_w_completed_key(self):
         from google.cloud.datastore.helpers import _property_tuples
 
@@ -715,31 +720,43 @@ def _delete_multi(*args, **kw):
     def test_delete_multi_no_keys(self):
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
+        client._datastore_api_internal = _make_datastore_api()
+
         result = client.delete_multi([])
         self.assertIsNone(result)
-        self.assertEqual(len(client._connection._commit_cw), 0)
+        client._datastore_api_internal.commit.assert_not_called()
 
     def test_delete_multi_no_batch(self):
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
         key = _Key(self.PROJECT)
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
-        client._connection._commit.append([])
+        ds_api = _make_datastore_api()
+        client._datastore_api_internal = ds_api
 
         result = client.delete_multi([key])
         self.assertIsNone(result)
-        self.assertEqual(len(client._connection._commit_cw), 1)
-        (project,
-         commit_req, transaction_id) = client._connection._commit_cw[0]
-        self.assertEqual(project, self.PROJECT)
 
-        mutated_key = _mutated_pb(self, commit_req.mutations, 'delete')
+        self.assertEqual(ds_api.commit.call_count, 1)
+        _, positional, keyword = ds_api.commit.mock_calls[0]
+        self.assertEqual(keyword, {'transaction': None})
+
+        self.assertEqual(len(positional), 3)
+        self.assertEqual(positional[0], self.PROJECT)
+        self.assertEqual(
+            positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL)
+
+        mutations = positional[2]
+        mutated_key = _mutated_pb(self, mutations, 'delete')
         self.assertEqual(mutated_key, key.to_protobuf())
-        self.assertIsNone(transaction_id)
 
     def test_delete_multi_w_existing_batch(self):
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
+        client._datastore_api_internal = _make_datastore_api()
+
         key = _Key(self.PROJECT)
 
         with _NoCommitBatch(client) as CURR_BATCH:
@@ -748,11 +765,13 @@ def test_delete_multi_w_existing_batch(self):
         self.assertIsNone(result)
         mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete')
         self.assertEqual(mutated_key, key._key)
-        self.assertEqual(len(client._connection._commit_cw), 0)
+        client._datastore_api_internal.commit.assert_not_called()
 
     def test_delete_multi_w_existing_transaction(self):
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
+        client._datastore_api_internal = _make_datastore_api()
+
         key = _Key(self.PROJECT)
 
         with _NoCommitTransaction(client) as CURR_XACT:
@@ -761,7 +780,7 @@ def test_delete_multi_w_existing_transaction(self):
         self.assertIsNone(result)
         mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete')
         self.assertEqual(mutated_key, key._key)
-        self.assertEqual(len(client._connection._commit_cw), 0)
+        client._datastore_api_internal.commit.assert_not_called()
 
     def test_allocate_ids_w_partial_key(self):
         num_ids = 2
@@ -975,8 +994,6 @@ def __init__(self, credentials=None, http=None):
         self.http = http
         self._lookup_cw = []
         self._lookup = []
-        self._commit_cw = []
-        self._commit = []
 
     def _add_lookup_result(self, results=(), missing=(), deferred=()):
         self._lookup.append((list(results), list(missing), list(deferred)))
@@ -997,15 +1014,6 @@ def lookup(self, project, key_pbs, eventual=False, transaction_id=None):
             deferred=deferred,
             spec=['found', 'missing', 'deferred'])
 
-    def commit(self, project, commit_request, transaction_id):
-        from google.cloud.proto.datastore.v1 import datastore_pb2
-
-        self._commit_cw.append((project, commit_request, transaction_id))
-        keys, self._commit = self._commit[0], self._commit[1:]
-        mutation_results = [
-            datastore_pb2.MutationResult(key=key) for key in keys]
-        return datastore_pb2.CommitResponse(mutation_results=mutation_results)
-
 
 class _NoCommitBatch(object):
 
@@ -1121,3 +1129,17 @@ def _make_key(id_):
     elem = key.path.add()
     elem.id = id_
     return key
+
+
+def _make_commit_response(*keys):
+    from google.cloud.proto.datastore.v1 import datastore_pb2
+
+    mutation_results = [
+        datastore_pb2.MutationResult(key=key) for key in keys]
+    return datastore_pb2.CommitResponse(mutation_results=mutation_results)
+
+
+def _make_datastore_api(*keys):
+    commit_method = mock.Mock(
+        return_value=_make_commit_response(*keys), spec=[])
+    return mock.Mock(commit=commit_method, spec=['commit'])
diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py
index c122fb38b11f..210f9d71cdd7 100644
--- a/packages/google-cloud-datastore/unit_tests/test_transaction.py
+++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py
@@ -29,25 +29,24 @@ def _make_one(self, client, **kw):
         return self._get_target_class()(client, **kw)
 
     def test_ctor_defaults(self):
-        from google.cloud.proto.datastore.v1 import datastore_pb2
-
-        _PROJECT = 'PROJECT'
+        project = 'PROJECT'
         connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        client = _Client(project, connection)
         xact = self._make_one(client)
-        self.assertEqual(xact.project, _PROJECT)
+        self.assertEqual(xact.project, project)
         self.assertIs(xact._client, client)
         self.assertIsNone(xact.id)
         self.assertEqual(xact._status, self._get_target_class()._INITIAL)
-        self.assertIsInstance(xact._commit_request,
-                              datastore_pb2.CommitRequest)
-        self.assertIs(xact.mutations, xact._commit_request.mutations)
+        self.assertEqual(xact._mutations, [])
         self.assertEqual(len(xact._partial_key_entities), 0)
 
     def test_current(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection()
-        client = _Client(_PROJECT, connection)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        id_ = 678
+        connection = _Connection(id_)
+        client = _Client(project, connection)
         xact1 = self._make_one(client)
         xact2 = self._make_one(client)
         self.assertIsNone(xact1.current())
@@ -69,36 +68,41 @@ def test_current(self):
         self.assertIsNone(xact1.current())
         self.assertIsNone(xact2.current())
 
+        client._datastore_api.rollback.assert_not_called()
+        commit_method = client._datastore_api.commit
+        self.assertEqual(commit_method.call_count, 2)
+        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        commit_method.assert_called_with(project, mode, [], transaction=id_)
+
     def test_begin(self):
-        _PROJECT = 'PROJECT'
+        project = 'PROJECT'
         connection = _Connection(234)
-        client = _Client(_PROJECT, connection)
+        client = _Client(project, connection)
         xact = self._make_one(client)
         xact.begin()
         self.assertEqual(xact.id, 234)
-        self.assertEqual(connection._begun, _PROJECT)
+        self.assertEqual(connection._begun, project)
 
     def test_begin_tombstoned(self):
         project = 'PROJECT'
         id_ = 234
         connection = _Connection(id_)
-        ds_api = mock.Mock(spec=['rollback'])
-        client = _Client(project, connection, datastore_api=ds_api)
+        client = _Client(project, connection)
         xact = self._make_one(client)
         xact.begin()
         self.assertEqual(xact.id, id_)
         self.assertEqual(connection._begun, project)
 
         xact.rollback()
-        ds_api.rollback.assert_called_once_with(project, id_)
+        client._datastore_api.rollback.assert_called_once_with(project, id_)
         self.assertIsNone(xact.id)
 
         self.assertRaises(ValueError, xact.begin)
 
     def test_begin_w_begin_transaction_failure(self):
-        _PROJECT = 'PROJECT'
+        project = 'PROJECT'
         connection = _Connection(234)
-        client = _Client(_PROJECT, connection)
+        client = _Client(project, connection)
         xact = self._make_one(client)
         connection._side_effect = RuntimeError
@@ -106,61 +110,74 @@ def test_begin_w_begin_transaction_failure(self):
             xact.begin()
 
         self.assertIsNone(xact.id)
-        self.assertEqual(connection._begun, _PROJECT)
+        self.assertEqual(connection._begun, project)
 
     def test_rollback(self):
         project = 'PROJECT'
         id_ = 234
         connection = _Connection(id_)
-        ds_api = mock.Mock(spec=['rollback'])
-        client = _Client(project, connection, datastore_api=ds_api)
+        client = _Client(project, connection)
         xact = self._make_one(client)
         xact.begin()
 
         xact.rollback()
-        ds_api.rollback.assert_called_once_with(project, id_)
+        client._datastore_api.rollback.assert_called_once_with(project, id_)
         self.assertIsNone(xact.id)
 
     def test_commit_no_partial_keys(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection(234)
-        client = _Client(_PROJECT, connection)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        id_ = 234
+        connection = _Connection(id_)
+
+        client = _Client(project, connection)
         xact = self._make_one(client)
-        xact._commit_request = commit_request = object()
         xact.begin()
         xact.commit()
-        self.assertEqual(connection._committed,
-                         (_PROJECT, commit_request, 234))
+
+        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        client._datastore_api.commit.assert_called_once_with(
+            project, mode, [], transaction=id_)
         self.assertIsNone(xact.id)
 
     def test_commit_w_partial_keys(self):
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
         project = 'PROJECT'
         kind = 'KIND'
-        id_ = 123
-        key = _make_key(kind, id_, project)
-        connection = _Connection(234, keys=[key])
-        client = _Client(project, connection)
+        id1 = 123
+        key = _make_key(kind, id1, project)
+        ds_api = _make_datastore_api(key)
+        id2 = 234
+        connection = _Connection(id2)
+        client = _Client(project, connection, datastore_api=ds_api)
         xact = self._make_one(client)
         xact.begin()
         entity = _Entity()
         xact.put(entity)
-        xact._commit_request = commit_request = object()
         xact.commit()
-        self.assertEqual(connection._committed,
-                         (project, commit_request, 234))
+
+        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        ds_api.commit.assert_called_once_with(
+            project, mode, xact.mutations, transaction=id2)
         self.assertIsNone(xact.id)
-        self.assertEqual(entity.key.path, [{'kind': kind, 'id': id_}])
+        self.assertEqual(entity.key.path, [{'kind': kind, 'id': id1}])
 
     def test_context_manager_no_raise(self):
-        _PROJECT = 'PROJECT'
-        connection = _Connection(234)
-        client = _Client(_PROJECT, connection)
+        from google.cloud.proto.datastore.v1 import datastore_pb2
+
+        project = 'PROJECT'
+        id_ = 234
+        connection = _Connection(id_)
+        client = _Client(project, connection)
         xact = self._make_one(client)
-        xact._commit_request = commit_request = object()
         with xact:
-            self.assertEqual(xact.id, 234)
-            self.assertEqual(connection._begun, _PROJECT)
-        self.assertEqual(connection._committed,
-                         (_PROJECT, commit_request, 234))
+            self.assertEqual(xact.id, id_)
+            self.assertEqual(connection._begun, project)
+
+        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        client._datastore_api.commit.assert_called_once_with(
+            project, mode, [], transaction=id_)
         self.assertIsNone(xact.id)
 
     def test_context_manager_w_raise(self):
@@ -171,8 +188,7 @@ class Foo(Exception):
         project = 'PROJECT'
         id_ = 234
         connection = _Connection(id_)
-        ds_api = mock.Mock(spec=['rollback'])
-        client = _Client(project, connection, datastore_api=ds_api)
+        client = _Client(project, connection)
         xact = self._make_one(client)
         xact._mutation = object()
         try:
@@ -182,8 +198,10 @@ class Foo(Exception):
                 raise Foo()
         except Foo:
             self.assertIsNone(xact.id)
-            ds_api.rollback.assert_called_once_with(project, id_)
-        self.assertIsNone(connection._committed)
+            client._datastore_api.rollback.assert_called_once_with(
+                project, id_)
+
+        client._datastore_api.commit.assert_not_called()
 
         self.assertIsNone(xact.id)
 
@@ -199,19 +217,11 @@ def _make_key(kind, id_, project):
 
 class _Connection(object):
-    _marker = object()
     _begun = None
-    _committed = None
     _side_effect = None
 
-    def __init__(self, xact_id=123, keys=()):
-        from google.cloud.proto.datastore.v1 import datastore_pb2
-
+    def __init__(self, xact_id=123):
         self._xact_id = xact_id
-        mutation_results = [
-            datastore_pb2.MutationResult(key=key) for key in keys]
-        self._commit_response_pb = datastore_pb2.CommitResponse(
-            mutation_results=mutation_results)
 
     def begin_transaction(self, project):
         self._begun = project
@@ -221,10 +231,6 @@ def begin_transaction(self, project):
         else:
             raise self._side_effect
 
-    def commit(self, project, commit_request, transaction_id):
-        self._committed = (project, commit_request, transaction_id)
-        return self._commit_response_pb
-
 
 class _Entity(dict):
 
@@ -241,6 +247,8 @@ def __init__(self, project, connection,
                  datastore_api=None, namespace=None):
         self.project = project
         self._connection = connection
+        if datastore_api is None:
+            datastore_api = _make_datastore_api()
         self._datastore_api = datastore_api
         self.namespace = namespace
         self._batches = []
@@ -270,3 +278,17 @@ def __enter__(self):
 
     def __exit__(self, *args):
         self._client._pop_batch()
+
+
+def _make_commit_response(*keys):
+    from google.cloud.proto.datastore.v1 import datastore_pb2
+
+    mutation_results = [
+        datastore_pb2.MutationResult(key=key) for key in keys]
+    return datastore_pb2.CommitResponse(mutation_results=mutation_results)
+
+
+def _make_datastore_api(*keys):
+    commit_method = mock.Mock(
+        return_value=_make_commit_response(*keys), spec=[])
+    return mock.Mock(commit=commit_method, spec=['commit', 'rollback'])

From bf7e08b090fce8a25b6be16c45308f230da231f0 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Thu, 9 Mar 2017 14:36:59 -0800
Subject: [PATCH 096/611] Using six.reraise to preserve traceback (Python 3 feature backport).

---
 .../google-cloud-datastore/google/cloud/datastore/_gax.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
index c88614f19bf2..7475340dcbe5 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
@@ -16,6 +16,7 @@
 
 import contextlib
+import sys
 
 from google.cloud.gapic.datastore.v1 import datastore_client
 from google.cloud.proto.datastore.v1 import datastore_pb2_grpc
@@ -79,14 +80,16 @@ def _grpc_catch_rendezvous():
         if error_class is None:
             raise
         else:
-            raise error_class(exc.cause.details())
+            new_exc = error_class(exc.cause.details())
+            six.reraise(error_class, new_exc, sys.exc_info()[2])
     except exceptions.GrpcRendezvous as exc:
         error_code = exc.code()
         error_class = _GRPC_ERROR_MAPPING.get(error_code)
         if error_class is None:
             raise
         else:
-            raise error_class(exc.details())
+            new_exc = error_class(exc.details())
+            six.reraise(error_class, new_exc, sys.exc_info()[2])
 
 
 class _DatastoreAPIOverGRPC(object):

From 473e8d5901238c192c5a71d81be047dfe6c5b195 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 10 Mar 2017 12:00:20 -0800
Subject: [PATCH 097/611] Using GAPIC datastore object (and an HTTP equivalent) for begin_transaction.

---
 .../google/cloud/datastore/_gax.py        |  18 ---
 .../google/cloud/datastore/_http.py       |  47 +++-----
 .../google/cloud/datastore/transaction.py |   2 +-
 .../unit_tests/test__gax.py               |  17 ---
 .../unit_tests/test__http.py              |  40 +++----
 .../unit_tests/test_transaction.py        | 112 +++++++++---------
 6 files changed, 91 insertions(+), 145 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
index 7475340dcbe5..8037e7ff78dd 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
@@ -152,24 +152,6 @@ def run_query(self, project, request_pb):
         with _grpc_catch_rendezvous():
             return self._stub.RunQuery(request_pb)
 
-    def begin_transaction(self, project, request_pb):
-        """Perform a ``beginTransaction`` request.
-
-        :type project: str
-        :param project: The project to connect to. This is
-                        usually your project name in the cloud console.
-
-        :type request_pb:
-            :class:`.datastore_pb2.BeginTransactionRequest`
-        :param request_pb: The request protobuf object.
-
-        :rtype: :class:`.datastore_pb2.BeginTransactionResponse`
-        :returns: The returned protobuf response object.
-        """
-        request_pb.project_id = project
-        with _grpc_catch_rendezvous():
-            return self._stub.BeginTransaction(request_pb)
-
 
 class GAPICDatastoreAPI(datastore_client.DatastoreClient):
     """An API object that sends proto-over-gRPC requests.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index c3c1f0bcda36..62499c8fcffc 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -198,24 +198,6 @@ def run_query(self, project, request_pb):
                     self.connection.api_base_url,
                     request_pb, _datastore_pb2.RunQueryResponse)
 
-    def begin_transaction(self, project, request_pb):
-        """Perform a ``beginTransaction`` request.
-
-        :type project: str
-        :param project: The project to connect to. This is
-                        usually your project name in the cloud console.
-
-        :type request_pb:
-            :class:`.datastore_pb2.BeginTransactionRequest`
-        :param request_pb: The request protobuf object.
-
-        :rtype: :class:`.datastore_pb2.BeginTransactionResponse`
-        :returns: The returned protobuf response object.
-        """
-        return _rpc(self.connection.http, project, 'beginTransaction',
-                    self.connection.api_base_url,
-                    request_pb, _datastore_pb2.BeginTransactionResponse)
-
 
 class Connection(connection_module.Connection):
     """A connection to the Google Cloud Datastore via the Protobuf API.
@@ -335,20 +317,6 @@ def run_query(self, project, query_pb, namespace=None,
         request.query.CopyFrom(query_pb)
         return self._datastore_api.run_query(project, request)
 
-    def begin_transaction(self, project):
-        """Begin a transaction.
-
-        Maps the ``DatastoreService.BeginTransaction`` protobuf RPC.
-
-        :type project: str
-        :param project: The project to which the transaction applies.
-
-        :rtype: :class:`.datastore_pb2.BeginTransactionResponse`
-        :returns: The serialized transaction that was begun.
-        """
-        request = _datastore_pb2.BeginTransactionRequest()
-        return self._datastore_api.begin_transaction(project, request)
-
 
 class HTTPDatastoreAPI(object):
     """An API object that sends proto-over-HTTP requests.
@@ -362,6 +330,21 @@ class HTTPDatastoreAPI(object):
     def __init__(self, client):
         self.client = client
 
+    def begin_transaction(self, project):
+        """Perform a ``beginTransaction`` request.
+
+        :type project: str
+        :param project: The project to connect to. This is
+                        usually your project name in the cloud console.
+
+        :rtype: :class:`.datastore_pb2.BeginTransactionResponse`
+        :returns: The returned protobuf response object.
+        """
+        request_pb = _datastore_pb2.BeginTransactionRequest()
+        return _rpc(self.client._http, project, 'beginTransaction',
+                    self.client._base_url,
+                    request_pb, _datastore_pb2.BeginTransactionResponse)
+
     def commit(self, project, mode, mutations, transaction=None):
         """Perform a ``commit`` request.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
index 651bbfcd0ba3..c1cd6a01321a 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py
@@ -184,7 +184,7 @@ def begin(self):
         """
         super(Transaction, self).begin()
         try:
-            response_pb = self._client._connection.begin_transaction(
+            response_pb = self._client._datastore_api.begin_transaction(
                 self.project)
             self._id = response_pb.transaction
         except:  # noqa: E722 do not use bare except, specify exception instead
diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py
index ffc1bad756fd..0061ea106df9 100644
--- a/packages/google-cloud-datastore/unit_tests/test__gax.py
+++ b/packages/google-cloud-datastore/unit_tests/test__gax.py
@@ -240,20 +240,6 @@ def test_run_query_invalid_argument(self):
         exc = GrpcRendezvous(exc_state, None, None, None)
         self._run_query_failure_helper(exc, BadRequest)
 
-    def test_begin_transaction(self):
-        return_val = object()
-        stub = _GRPCStub(return_val)
-        datastore_api, _ = self._make_one(stub=stub)
-
-        request_pb = mock.Mock(project_id=None, spec=['project_id'])
-        project = 'PROJECT'
-        result = datastore_api.begin_transaction(project, request_pb)
-        self.assertIs(result, return_val)
-        self.assertEqual(request_pb.project_id, project)
-        self.assertEqual(
-            stub.method_calls,
-            [(request_pb, 'BeginTransaction')])
-
 
 @unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
 class TestGAPICDatastoreAPI(unittest.TestCase):
@@ -338,6 +324,3 @@ def Lookup(self, request_pb):
 
     def RunQuery(self, request_pb):
         return self._method(request_pb, 'RunQuery')
-
-    def BeginTransaction(self, request_pb):
-        return self._method(request_pb, 'BeginTransaction')
diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py
index 482120c49969..67262584f67d 100644
--- a/packages/google-cloud-datastore/unit_tests/test__http.py
+++ b/packages/google-cloud-datastore/unit_tests/test__http.py
@@ -634,6 +634,23 @@ def test_run_query_w_namespace_nonempty_result(self):
         self.assertEqual(request.partition_id.namespace_id, namespace)
         self.assertEqual(request.query, q_pb)
 
+
+class TestHTTPDatastoreAPI(unittest.TestCase):
+
+    @staticmethod
+    def _get_target_class():
+        from google.cloud.datastore._http import HTTPDatastoreAPI
+
+        return HTTPDatastoreAPI
+
+    def _make_one(self, *args, **kwargs):
+        return self._get_target_class()(*args, **kwargs)
+
+    def test_constructor(self):
+        client = object()
+        ds_api = self._make_one(client)
+        self.assertIs(ds_api.client, client)
+
     def test_begin_transaction(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
 
@@ -648,13 +665,13 @@ def test_begin_transaction(self):
             _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
 
         # Make request.
-        conn = self._make_one(client)
-        response = conn.begin_transaction(project)
+        ds_api = self._make_one(client)
+        response = ds_api.begin_transaction(project)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
         uri = _build_expected_url(
-            conn.api_base_url, project, 'beginTransaction')
+            client._base_url, project, 'beginTransaction')
         cw = http._called_with
         _verify_protobuf_call(self, cw, uri)
         request = datastore_pb2.BeginTransactionRequest()
@@ -662,23 +679,6 @@ def test_begin_transaction(self):
         request.ParseFromString(cw['body'])
         # The RPC-over-HTTP request does not set the project in the request.
         self.assertEqual(request.project_id, u'')
 
-
-class TestHTTPDatastoreAPI(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore._http import HTTPDatastoreAPI
-
-        return HTTPDatastoreAPI
-
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
-
-    def test_constructor(self):
-        client = object()
-        ds_api = self._make_one(client)
-        self.assertIs(ds_api.client, client)
-
     def test_commit_wo_transaction(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud.datastore.helpers import _new_value_pb
diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/unit_tests/test_transaction.py
index 210f9d71cdd7..a9a4194c7dca 100644
--- a/packages/google-cloud-datastore/unit_tests/test_transaction.py
+++ b/packages/google-cloud-datastore/unit_tests/test_transaction.py
@@ -30,8 +30,7 @@ def _make_one(self, client, **kw):
 
     def test_ctor_defaults(self):
         project = 'PROJECT'
-        connection = _Connection()
-        client = _Client(project, connection)
+        client = _Client(project)
         xact = self._make_one(client)
         self.assertEqual(xact.project, project)
         self.assertIs(xact._client, client)
@@ -45,8 +44,8 @@ def test_current(self):
 
         project = 'PROJECT'
         id_ = 678
-        connection = _Connection(id_)
-        client = _Client(project, connection)
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact1 = self._make_one(client)
         xact2 = self._make_one(client)
         self.assertIsNone(xact1.current())
@@ -68,30 +67,35 @@ def test_current(self):
         self.assertIsNone(xact1.current())
         self.assertIsNone(xact2.current())
 
-        client._datastore_api.rollback.assert_not_called()
-        commit_method = client._datastore_api.commit
+        ds_api.rollback.assert_not_called()
+        commit_method = ds_api.commit
         self.assertEqual(commit_method.call_count, 2)
         mode = datastore_pb2.CommitRequest.TRANSACTIONAL
         commit_method.assert_called_with(project, mode, [], transaction=id_)
 
+        begin_txn = ds_api.begin_transaction
+        self.assertEqual(begin_txn.call_count, 2)
+        begin_txn.assert_called_with(project)
+
     def test_begin(self):
         project = 'PROJECT'
-        connection = _Connection(234)
-        client = _Client(project, connection)
+        id_ = 889
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
         xact.begin()
-        self.assertEqual(xact.id, 234)
-        self.assertEqual(connection._begun, project)
+        self.assertEqual(xact.id, id_)
+        ds_api.begin_transaction.assert_called_once_with(project)
 
     def test_begin_tombstoned(self):
         project = 'PROJECT'
-        id_ = 234
-        connection = _Connection(id_)
-        client = _Client(project, connection)
+        id_ = 1094
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
         xact.begin()
         self.assertEqual(xact.id, id_)
-        self.assertEqual(connection._begun, project)
+        ds_api.begin_transaction.assert_called_once_with(project)
 
         xact.rollback()
         client._datastore_api.rollback.assert_called_once_with(project, id_)
@@ -101,36 +105,37 @@ def test_begin_tombstoned(self):
 
     def test_begin_w_begin_transaction_failure(self):
         project = 'PROJECT'
-        connection = _Connection(234)
-        client = _Client(project, connection)
+        id_ = 712
+        ds_api = _make_datastore_api(xact_id=id_)
+        ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[])
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
-        connection._side_effect = RuntimeError
 
         with self.assertRaises(RuntimeError):
             xact.begin()
 
         self.assertIsNone(xact.id)
-        self.assertEqual(connection._begun, project)
+        ds_api.begin_transaction.assert_called_once_with(project)
 
     def test_rollback(self):
         project = 'PROJECT'
-        id_ = 234
-        connection = _Connection(id_)
-        client = _Client(project, connection)
+        id_ = 239
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
         xact.begin()
 
         xact.rollback()
         client._datastore_api.rollback.assert_called_once_with(project, id_)
         self.assertIsNone(xact.id)
+        ds_api.begin_transaction.assert_called_once_with(project)
 
     def test_commit_no_partial_keys(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
 
         project = 'PROJECT'
-        id_ = 234
-        connection = _Connection(id_)
-
-        client = _Client(project, connection)
+        id_ = 1002930
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
         xact.begin()
         xact.commit()
@@ -139,6 +144,7 @@ def test_commit_no_partial_keys(self):
         client._datastore_api.commit.assert_called_once_with(
             project, mode, [], transaction=id_)
         self.assertIsNone(xact.id)
+        ds_api.begin_transaction.assert_called_once_with(project)
 
     def test_commit_w_partial_keys(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
@@ -147,10 +153,9 @@ def test_commit_w_partial_keys(self):
         kind = 'KIND'
         id1 = 123
         key = _make_key(kind, id1, project)
-        ds_api = _make_datastore_api(key)
         id2 = 234
-        connection = _Connection(id2)
-        client = _Client(project, connection, datastore_api=ds_api)
+        ds_api = _make_datastore_api(key, xact_id=id2)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
         xact.begin()
         entity = _Entity()
@@ -162,23 +167,25 @@ def test_commit_w_partial_keys(self):
             project, mode, xact.mutations, transaction=id2)
         self.assertIsNone(xact.id)
         self.assertEqual(entity.key.path, [{'kind': kind, 'id': id1}])
+        ds_api.begin_transaction.assert_called_once_with(project)
 
     def test_context_manager_no_raise(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
 
         project = 'PROJECT'
-        id_ = 234
-        connection = _Connection(id_)
-        client = _Client(project, connection)
+        id_ = 912830
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
 
         with xact:
             self.assertEqual(xact.id, id_)
-            self.assertEqual(connection._begun, project)
+            ds_api.begin_transaction.assert_called_once_with(project)
 
         mode = datastore_pb2.CommitRequest.TRANSACTIONAL
         client._datastore_api.commit.assert_called_once_with(
             project, mode, [], transaction=id_)
         self.assertIsNone(xact.id)
+        self.assertEqual(ds_api.begin_transaction.call_count, 1)
 
     def test_context_manager_w_raise(self):
@@ -186,15 +193,15 @@ class Foo(Exception):
             pass
 
         project = 'PROJECT'
-        id_ = 234
-        connection = _Connection(id_)
-        client = _Client(project, connection)
+        id_ = 614416
+        ds_api = _make_datastore_api(xact_id=id_)
+        client = _Client(project, datastore_api=ds_api)
         xact = self._make_one(client)
         xact._mutation = object()
         try:
             with xact:
                 self.assertEqual(xact.id, id_)
-                self.assertEqual(connection._begun, project)
+                ds_api.begin_transaction.assert_called_once_with(project)
                 raise Foo()
         except Foo:
             self.assertIsNone(xact.id)
@@ -203,6 +210,7 @@ class Foo(Exception):
 
         client._datastore_api.commit.assert_not_called()
 
         self.assertIsNone(xact.id)
+        self.assertEqual(ds_api.begin_transaction.call_count, 1)
 
 
 def _make_key(kind, id_, project):
@@ -216,22 +224,6 @@ def _make_key(kind, id_, project):
     return key
 
 
-class _Connection(object):
-    _begun = None
-    _side_effect = None
-
-    def __init__(self, xact_id=123):
-        self._xact_id = xact_id
-
-    def begin_transaction(self, project):
-        self._begun = project
-        if self._side_effect is None:
-            return mock.Mock(
-                transaction=self._xact_id, spec=['transaction'])
-        else:
-            raise self._side_effect
-
-
 class _Entity(dict):
 
     def __init__(self):
@@ -241,10 +235,8 @@ def __init__(self):
 
 class _Client(object):
 
-    def __init__(self, project, connection,
-                 datastore_api=None, namespace=None):
+    def __init__(self, project, datastore_api=None, namespace=None):
         self.project = project
-        self._connection = connection
         if datastore_api is None:
             datastore_api = _make_datastore_api()
         self._datastore_api = datastore_api
@@ -288,7 +278,15 @@ def _make_commit_response(*keys):
     return datastore_pb2.CommitResponse(mutation_results=mutation_results)
 
 
-def _make_datastore_api(*keys):
+def _make_datastore_api(*keys, **kwargs):
     commit_method = mock.Mock(
         return_value=_make_commit_response(*keys), spec=[])
-    return mock.Mock(commit=commit_method, spec=['commit', 'rollback'])
+
+    xact_id = kwargs.pop('xact_id', 123)
+    txn_pb = mock.Mock(
+        transaction=xact_id, spec=['transaction'])
+    begin_txn = mock.Mock(return_value=txn_pb, spec=[])
+
+    return mock.Mock(
+        commit=commit_method, begin_transaction=begin_txn,
+        spec=['begin_transaction', 'commit', 'rollback'])

From ead77467a1fb3858fc447758442080fa6bf18cd2 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Mon, 13 Mar 2017 09:51:27 -0700
Subject: [PATCH 098/611] Using GAPIC datastore object (and an HTTP equivalent) for run_query.

---
 .../google/cloud/datastore/_gax.py  |  17 ---
 .../google/cloud/datastore/_http.py | 127 ++++++------------
 .../google/cloud/datastore/query.py |  24 +++-
 3 files changed, 61 insertions(+), 107 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
index 8037e7ff78dd..fac5770239d0 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py
@@ -135,23 +135,6 @@ def lookup(self, project, request_pb):
         with _grpc_catch_rendezvous():
             return self._stub.Lookup(request_pb)
 
-    def run_query(self, project, request_pb):
-        """Perform a ``runQuery`` request.
-
-        :type project: str
-        :param project: The project to connect to. This is
-                        usually your project name in the cloud console.
-
-        :type request_pb: :class:`.datastore_pb2.RunQueryRequest`
-        :param request_pb: The request protobuf object.
-
-        :rtype: :class:`.datastore_pb2.RunQueryResponse`
-        :returns: The returned protobuf response object.
-        """
-        request_pb.project_id = project
-        with _grpc_catch_rendezvous():
-            return self._stub.RunQuery(request_pb)
-
 
 class GAPICDatastoreAPI(datastore_client.DatastoreClient):
     """An API object that sends proto-over-gRPC requests.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index 62499c8fcffc..910da9f0dbf6 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -181,23 +181,6 @@ def lookup(self, project, request_pb):
                     self.connection.api_base_url,
                     request_pb, _datastore_pb2.LookupResponse)
 
-    def run_query(self, project, request_pb):
-        """Perform a ``runQuery`` request.
-
-        :type project: str
-        :param project: The project to connect to. This is
-                        usually your project name in the cloud console.
-
-        :type request_pb: :class:`.datastore_pb2.RunQueryRequest`
-        :param request_pb: The request protobuf object.
-
-        :rtype: :class:`.datastore_pb2.RunQueryResponse`
-        :returns: The returned protobuf response object.
-        """
-        return _rpc(self.connection.http, project, 'runQuery',
-                    self.connection.api_base_url,
-                    request_pb, _datastore_pb2.RunQueryResponse)
-
 
 class Connection(connection_module.Connection):
     """A connection to the Google Cloud Datastore via the Protobuf API.
@@ -264,71 +247,61 @@ def lookup(self, project, key_pbs,
         :rtype: :class:`.datastore_pb2.LookupResponse`
         :returns: The returned protobuf for the lookup request.
         """
-        lookup_request = _datastore_pb2.LookupRequest()
+        lookup_request = _datastore_pb2.LookupRequest(keys=key_pbs)
         _set_read_options(lookup_request, eventual, transaction_id)
-        _add_keys_to_request(lookup_request.keys, key_pbs)
-
         return self._datastore_api.lookup(project, lookup_request)
 
-    def run_query(self, project, query_pb, namespace=None,
-                  eventual=False, transaction_id=None):
-        """Run a query on the Cloud Datastore.
 
-        Maps the ``DatastoreService.RunQuery`` protobuf RPC.
+class HTTPDatastoreAPI(object):
+    """An API object that sends proto-over-HTTP requests.
 
-        Given a Query protobuf, sends a ``runQuery`` request to the
-        Cloud Datastore API and returns a list of entity protobufs
-        matching the query.
+    Intended to provide the same methods as the GAPIC ``DatastoreClient``.
 
-        You typically wouldn't use this method directly, in favor of the
-        :meth:`google.cloud.datastore.query.Query.fetch` method.
+    :type client: :class:`~google.cloud.datastore.client.Client`
+    :param client: The client that provides configuration.
+    """
 
-        Under the hood, the :class:`google.cloud.datastore.query.Query` class
-        uses this method to fetch data.
+    def __init__(self, client):
+        self.client = client
+
+    def run_query(self, project, partition_id, read_options,
+                  query=None, gql_query=None):
+        """Perform a ``runQuery`` request.
 
         :type project: str
-        :param project: The project over which to run the query.
+        :param project: The project to connect to. This is
+                        usually your project name in the cloud console.
 
-        :type query_pb: :class:`.query_pb2.Query`
-        :param query_pb: The Protobuf representing the query to run.
+        :type partition_id: :class:`.entity_pb2.PartitionId`
+        :param partition_id: Partition ID corresponding to an optional
+                             namespace and project ID.
 
-        :type namespace: str
-        :param namespace: The namespace over which to run the query.
+        :type read_options: :class:`.datastore_pb2.ReadOptions`
+        :param read_options: The options for this query. Contains a
+                             either the transaction for the read or
+                             ``STRONG`` or ``EVENTUAL`` read consistency.
 
-        :type eventual: bool
-        :param eventual: If False (the default), request ``STRONG`` read
-                         consistency.  If True, request ``EVENTUAL`` read
-                         consistency.
+        :type query: :class:`.query_pb2.Query`
+        :param query: (Optional) The query protobuf to run. At most one of
+                      ``query`` and ``gql_query`` can be specified.
 
-        :type transaction_id: str
-        :param transaction_id: If passed, make the request in the scope of
-                               the given transaction.  Incompatible with
-                               ``eventual==True``.
+        :type gql_query: :class:`.query_pb2.GqlQuery`
+        :param gql_query: (Optional) The GQL query to run. At most one of
+                          ``query`` and ``gql_query`` can be specified.
 
         :rtype: :class:`.datastore_pb2.RunQueryResponse`
-        :returns: The protobuf response from a ``runQuery`` request.
+        :returns: The returned protobuf response object.
         """
-        request = _datastore_pb2.RunQueryRequest()
-        _set_read_options(request, eventual, transaction_id)
-
-        if namespace:
-            request.partition_id.namespace_id = namespace
-
-        request.query.CopyFrom(query_pb)
-        return self._datastore_api.run_query(project, request)
-
-
-class HTTPDatastoreAPI(object):
-    """An API object that sends proto-over-HTTP requests.
-
-    Intended to provide the same methods as the GAPIC ``DatastoreClient``.
-
-    :type client: :class:`~google.cloud.datastore.client.Client`
-    :param client: The client that provides configuration.
-    """
-
-    def __init__(self, client):
-        self.client = client
+        request_pb = _datastore_pb2.RunQueryRequest(
+            project_id=project,
+            partition_id=partition_id,
+            read_options=read_options,
+            query=query,
+            gql_query=gql_query,
+        )
+        return _rpc(self.client._http, project, 'runQuery',
+                    self.client._base_url,
+                    request_pb, _datastore_pb2.RunQueryResponse)
 
     def begin_transaction(self, project):
         """Perform a ``beginTransaction`` request.
@@ -391,8 +364,10 @@ def rollback(self, project, transaction_id):
         :rtype: :class:`.datastore_pb2.RollbackResponse`
         :returns: The returned protobuf response object.
         """
-        request_pb = _datastore_pb2.RollbackRequest()
-        request_pb.transaction = transaction_id
+        request_pb = _datastore_pb2.RollbackRequest(
+            project_id=project,
+            transaction=transaction_id,
+        )
         # Response is empty (i.e. no fields) but we return it anyway.
         return _rpc(self.client._http, project, 'rollback',
                     self.client._base_url,
@@ -411,8 +386,7 @@ def allocate_ids(self, project, key_pbs):
         :rtype: :class:`.datastore_pb2.AllocateIdsResponse`
         :returns: The returned protobuf response object.
         """
-        request_pb = _datastore_pb2.AllocateIdsRequest()
-        _add_keys_to_request(request_pb.keys, key_pbs)
+        request_pb = _datastore_pb2.AllocateIdsRequest(keys=key_pbs)
         return _rpc(self.client._http, project, 'allocateIds',
                     self.client._base_url,
                     request_pb, _datastore_pb2.AllocateIdsResponse)
@@ -434,16 +408,3 @@ def _set_read_options(request, eventual, transaction_id):
         opts.read_consistency = _datastore_pb2.ReadOptions.EVENTUAL
     elif transaction_id:
         opts.transaction = transaction_id
-
-
-def _add_keys_to_request(request_field_pb, key_pbs):
-    """Add protobuf keys to a request object.
-
-    :type request_field_pb: `RepeatedCompositeFieldContainer`
-    :param request_field_pb: A repeated proto field that contains keys.
-
-    :type key_pbs: list of :class:`.entity_pb2.Key`
-    :param key_pbs: The keys to add to a request.
-    """
-    for key_pb in key_pbs:
-        request_field_pb.add().CopyFrom(key_pb)
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py
index 8342554b558f..27189bdb12b2 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/query.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py
@@ -20,6 +20,8 @@
 
 from google.cloud.iterator import Iterator as BaseIterator
 from google.cloud.iterator import Page
 
+from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2
+from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2
 from google.cloud.proto.datastore.v1 import query_pb2 as _query_pb2
 from google.cloud.datastore import helpers
 from google.cloud.datastore.key import Key
@@ -479,14 +481,22 @@ def _next_page(self):
         if not self._more_results:
             return None
 
-        pb = self._build_protobuf()
+        query_pb = self._build_protobuf()
         transaction = self.client.current_transaction
-
-        response_pb = self.client._connection.run_query(
-            query_pb=pb,
-            project=self._query.project,
-            namespace=self._query.namespace,
-            transaction_id=transaction and transaction.id,
+        if transaction is None:
+            read_options = _datastore_pb2.ReadOptions()
+        else:
+            read_options = _datastore_pb2.ReadOptions(
+                transaction=transaction.id)
+
+        partition_id = _entity_pb2.PartitionId(
+            project_id=self._query.project,
+            namespace_id=self._query.namespace)
+        response_pb = self.client._datastore_api.run_query(
+            self._query.project,
+            partition_id,
+            read_options,
+            query=query_pb,
         )
         entity_pbs = self._process_query_results(response_pb)
         return Page(self, entity_pbs, self._item_to_value)

From ee7b46532e40ec993fe0dc75ae963eff2157023a Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Mon, 13 Mar 2017 09:51:27 -0700
Subject: [PATCH 099/611] Updating unit tests for GAPIC datastore run_query change.
---
 .../unit_tests/test__gax.py  |  51 +----
 .../unit_tests/test__http.py | 207 +++++++++---------
 .../unit_tests/test_query.py |  91 ++++----
 3 files changed, 155 insertions(+), 194 deletions(-)

diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py
index 0061ea106df9..433162542ea9 100644
--- a/packages/google-cloud-datastore/unit_tests/test__gax.py
+++ b/packages/google-cloud-datastore/unit_tests/test__gax.py
@@ -200,46 +200,6 @@ def test_lookup(self):
         self.assertEqual(stub.method_calls,
                          [(request_pb, 'Lookup')])
 
-    def test_run_query(self):
-        return_val = object()
-        stub = _GRPCStub(return_val)
-        datastore_api, _ = self._make_one(stub=stub)
-
-        request_pb = mock.Mock(project_id=None, spec=['project_id'])
-        project = 'PROJECT'
-        result = datastore_api.run_query(project, request_pb)
-        self.assertIs(result, return_val)
-        self.assertEqual(request_pb.project_id, project)
-        self.assertEqual(stub.method_calls,
-                         [(request_pb, 'RunQuery')])
-
-    def _run_query_failure_helper(self, exc, err_class):
-        stub = _GRPCStub(side_effect=exc)
-        datastore_api, _ = self._make_one(stub=stub)
-
-        request_pb = mock.Mock(project_id=None, spec=['project_id'])
-        project = 'PROJECT'
-        with self.assertRaises(err_class):
-            datastore_api.run_query(project, request_pb)
-
-        self.assertEqual(request_pb.project_id, project)
-        self.assertEqual(stub.method_calls,
-                         [(request_pb, 'RunQuery')])
-
-    @unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
-    def test_run_query_invalid_argument(self):
-        from grpc import StatusCode
-        from grpc._channel import _RPCState
-        from google.cloud.exceptions import BadRequest
-        from google.cloud.exceptions import GrpcRendezvous
-
-        details = ('Cannot have inequality filters on multiple '
-                   'properties: [created, priority]')
-        exc_state = _RPCState((), None, None,
-                              StatusCode.INVALID_ARGUMENT, details)
-        exc = GrpcRendezvous(exc_state, None, None, None)
-        self._run_query_failure_helper(exc, BadRequest)
-
 
 @unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
 class TestGAPICDatastoreAPI(unittest.TestCase):
@@ -307,20 +267,13 @@ def test_it(self, make_chan, mock_klass):
 
 class _GRPCStub(object):
 
-    def __init__(self, return_val=None, side_effect=Exception):
+    def __init__(self, return_val=None):
         self.return_val = return_val
-        self.side_effect = side_effect
         self.method_calls = []
 
     def _method(self, request_pb, name):
         self.method_calls.append((request_pb, name))
-        if self.side_effect is Exception:
-            return self.return_val
-        else:
-            raise self.side_effect
+        return self.return_val
 
     def Lookup(self, request_pb):
         return self._method(request_pb, 'Lookup')
-
-    def RunQuery(self, request_pb):
-        return self._method(request_pb, 'RunQuery')
diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py
index 482120c49969..748eba93b7f3 100644
--- a/packages/google-cloud-datastore/unit_tests/test__http.py
+++ b/packages/google-cloud-datastore/unit_tests/test__http.py
@@ -161,13 +161,6 @@ def _get_target_class():
 
         return Connection
 
-    def _make_query_pb(self, kind):
-        from google.cloud.proto.datastore.v1 import query_pb2
-
-        pb = query_pb2.Query()
-        pb.kind.add().name = kind
-        return pb
-
     def _make_one(self, client, use_grpc=False):
         with mock.patch('google.cloud.datastore._http._USE_GRPC',
                         new=use_grpc):
@@ -279,14 +272,14 @@ def test_lookup_single_key_empty_response_w_eventual(self):
         self.assertEqual(request.read_options.transaction, b'')
 
     def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
-        PROJECT = 'PROJECT'
-        TRANSACTION = b'TRANSACTION'
-        key_pb = _make_key_pb(PROJECT)
+        project = 'PROJECT'
+        transaction = b'TRANSACTION'
+        key_pb = _make_key_pb(project)
 
         client = mock.Mock(spec=['_base_url'])
         conn = self._make_one(client)
-        self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb,
-                          eventual=True, transaction_id=TRANSACTION)
+        self.assertRaises(ValueError, conn.lookup, project, [key_pb],
+                          eventual=True, transaction_id=transaction)
 
     def test_lookup_single_key_empty_response_w_transaction(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
@@ -472,19 +465,50 @@ def test_lookup_multiple_keys_w_deferred(self):
         self.assertEqual(key_pb1, keys[0])
         self.assertEqual(key_pb2, keys[1])
 
+
+class TestHTTPDatastoreAPI(unittest.TestCase):
+
+    @staticmethod
+    def _get_target_class():
+        from google.cloud.datastore._http import HTTPDatastoreAPI
+
+        return HTTPDatastoreAPI
+
+    def _make_one(self, *args, **kwargs):
+        return self._get_target_class()(*args, **kwargs)
+
+    @staticmethod
+    def _make_query_pb(kind):
+        from google.cloud.proto.datastore.v1 import query_pb2
+
+        return query_pb2.Query(
+            kind=[query_pb2.KindExpression(name=kind)],
+        )
+
+    def test_constructor(self):
+        client = object()
+        ds_api = self._make_one(client)
+        self.assertIs(ds_api.client, client)
+
     def test_run_query_w_eventual_no_transaction(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.proto.datastore.v1 import query_pb2
 
         project = 'PROJECT'
         kind = 'Nonesuch'
         cursor = b'\x00'
-        q_pb = self._make_query_pb(kind)
-        rsp_pb = datastore_pb2.RunQueryResponse()
-        rsp_pb.batch.end_cursor = cursor
-        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
-        rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
+        query_pb = self._make_query_pb(kind)
+        partition_id = entity_pb2.PartitionId(project_id=project)
+        read_options = datastore_pb2.ReadOptions(
+            read_consistency=datastore_pb2.ReadOptions.EVENTUAL)
+        rsp_pb = datastore_pb2.RunQueryResponse(
+            batch=query_pb2.QueryResultBatch(
+                entity_result_type=query_pb2.EntityResult.FULL,
+                end_cursor=cursor,
+                more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+            )
+        )
 
         # Create mock HTTP and client with response.
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(
             _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
 
         # Make request.
-        conn = self._make_one(client)
-        response = conn.run_query(project, q_pb, eventual=True)
+        ds_api = self._make_one(client)
+        response = ds_api.run_query(
+            project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
 
-        uri = _build_expected_url(conn.api_base_url, project, 'runQuery')
+        uri = _build_expected_url(client._base_url, project, 'runQuery')
         cw = http._called_with
         _verify_protobuf_call(self, cw, uri)
         request = datastore_pb2.RunQueryRequest()
         request.ParseFromString(cw['body'])
-        self.assertEqual(request.partition_id.namespace_id, '')
-        self.assertEqual(request.query, q_pb)
-        self.assertEqual(request.read_options.read_consistency,
-                         datastore_pb2.ReadOptions.EVENTUAL)
-        self.assertEqual(request.read_options.transaction, b'')
+        self.assertEqual(request.partition_id, partition_id)
+        self.assertEqual(request.query, query_pb)
+        self.assertEqual(request.read_options, read_options)
 
     def test_run_query_wo_eventual_w_transaction(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.proto.datastore.v1 import query_pb2
 
         project = 'PROJECT'
         kind = 'Nonesuch'
         cursor = b'\x00'
         transaction = b'TRANSACTION'
-        q_pb = self._make_query_pb(kind)
-        rsp_pb = datastore_pb2.RunQueryResponse()
-        rsp_pb.batch.end_cursor = cursor
-        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
-        rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
+        query_pb = self._make_query_pb(kind)
+        partition_id = entity_pb2.PartitionId(project_id=project)
+        read_options = datastore_pb2.ReadOptions(transaction=transaction)
+        rsp_pb = datastore_pb2.RunQueryResponse(
+            batch=query_pb2.QueryResultBatch(
+                entity_result_type=query_pb2.EntityResult.FULL,
+                end_cursor=cursor,
+                more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+            )
+        )
 
         # Create mock HTTP and client with response.
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(
             _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
 
         # Make request.
-        conn = self._make_one(client)
-        response = conn.run_query(
-            project, q_pb, transaction_id=transaction)
+        ds_api = self._make_one(client)
+        response = ds_api.run_query(
+            project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
 
-        uri = _build_expected_url(conn.api_base_url, project, 'runQuery')
+        uri = _build_expected_url(client._base_url, project, 'runQuery')
         cw = http._called_with
         _verify_protobuf_call(self, cw, uri)
         request = datastore_pb2.RunQueryRequest()
         request.ParseFromString(cw['body'])
-        self.assertEqual(request.partition_id.namespace_id, '')
-        self.assertEqual(request.query, q_pb)
-        self.assertEqual(
-            request.read_options.read_consistency,
-            datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED)
-        self.assertEqual(request.read_options.transaction, transaction)
-
-    def test_run_query_w_eventual_and_transaction(self):
-        from google.cloud.proto.datastore.v1 import datastore_pb2
-        from google.cloud.proto.datastore.v1 import query_pb2
-
-        PROJECT = 'PROJECT'
-        KIND = 'Nonesuch'
-        CURSOR = b'\x00'
-        TRANSACTION = b'TRANSACTION'
-        q_pb = self._make_query_pb(KIND)
-        rsp_pb = datastore_pb2.RunQueryResponse()
-        rsp_pb.batch.end_cursor = CURSOR
-        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
-        rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
-
-        client = mock.Mock(spec=['_base_url'])
-        conn = self._make_one(client)
-        self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb,
-                          eventual=True, transaction_id=TRANSACTION)
+        self.assertEqual(request.partition_id, partition_id)
+        self.assertEqual(request.query, query_pb)
+        self.assertEqual(request.read_options, read_options)
 
     def test_run_query_wo_namespace_empty_result(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
+        from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.proto.datastore.v1 import query_pb2
 
         project = 'PROJECT'
         kind = 'Nonesuch'
         cursor = b'\x00'
-        q_pb = self._make_query_pb(kind)
-        rsp_pb = datastore_pb2.RunQueryResponse()
-        rsp_pb.batch.end_cursor = cursor
-        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
-        rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
+        query_pb = self._make_query_pb(kind)
+        partition_id = entity_pb2.PartitionId(project_id=project)
+        read_options = datastore_pb2.ReadOptions()
+        rsp_pb = datastore_pb2.RunQueryResponse(
+            batch=query_pb2.QueryResultBatch(
+                entity_result_type=query_pb2.EntityResult.FULL,
+                end_cursor=cursor,
+                more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+            )
+        )
 
         # Create mock HTTP and client with response.
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(
             _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
 
         # Make request.
-        conn = self._make_one(client)
-        response = conn.run_query(project, q_pb)
+        ds_api = self._make_one(client)
+        response = ds_api.run_query(
+            project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
 
-        uri = _build_expected_url(conn.api_base_url, project, 'runQuery')
+        uri = _build_expected_url(client._base_url, project, 'runQuery')
         cw = http._called_with
         _verify_protobuf_call(self, cw, uri)
         request = datastore_pb2.RunQueryRequest()
         request.ParseFromString(cw['body'])
-        self.assertEqual(request.partition_id.namespace_id, '')
-        self.assertEqual(request.query, q_pb)
+        self.assertEqual(request.partition_id, partition_id)
+        self.assertEqual(request.query, query_pb)
+        self.assertEqual(request.read_options, read_options)
 
     def test_run_query_w_namespace_nonempty_result(self):
         from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.proto.datastore.v1 import query_pb2
 
         project = 'PROJECT'
         kind = 'Kind'
-        entity = entity_pb2.Entity()
-        q_pb = self._make_query_pb(kind)
-        rsp_pb = datastore_pb2.RunQueryResponse()
-        rsp_pb.batch.entity_results.add(entity=entity)
-        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
-        rsp_pb.batch.more_results = query_pb2.QueryResultBatch.NO_MORE_RESULTS
+        namespace = 'NS'
+        query_pb = self._make_query_pb(kind)
+        partition_id = entity_pb2.PartitionId(
+            project_id=project, namespace_id=namespace)
+        read_options = datastore_pb2.ReadOptions()
+        rsp_pb = datastore_pb2.RunQueryResponse(
+            batch=query_pb2.QueryResultBatch(
+                entity_result_type=query_pb2.EntityResult.FULL,
+                entity_results=[
+                    query_pb2.EntityResult(entity=entity_pb2.Entity()),
+                ],
+                more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+            )
+        )
 
         # Create mock HTTP and client with response.
         http = Http({'status': '200'}, rsp_pb.SerializeToString())
         client = mock.Mock(
             _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
 
         # Make request.
-        conn = self._make_one(client)
-        namespace = 'NS'
-        response = conn.run_query(project, q_pb, namespace=namespace)
+        ds_api = self._make_one(client)
+        response = ds_api.run_query(
+            project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
self.assertEqual(response, rsp_pb) cw = http._called_with - uri = _build_expected_url(conn.api_base_url, project, 'runQuery') + uri = _build_expected_url(client._base_url, project, 'runQuery') _verify_protobuf_call(self, cw, uri) request = datastore_pb2.RunQueryRequest() request.ParseFromString(cw['body']) - self.assertEqual(request.partition_id.namespace_id, namespace) - self.assertEqual(request.query, q_pb) - - -class TestHTTPDatastoreAPI(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import HTTPDatastoreAPI - - return HTTPDatastoreAPI - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - client = object() - ds_api = self._make_one(client) - self.assertIs(ds_api.client, client) + self.assertEqual(request.partition_id, partition_id) + self.assertEqual(request.query, query_pb) def test_begin_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/unit_tests/test_query.py index fdf18a858775..b361ec25a42f 100644 --- a/packages/google-cloud-datastore/unit_tests/test_query.py +++ b/packages/google-cloud-datastore/unit_tests/test_query.py @@ -14,6 +14,8 @@ import unittest +import mock + class TestQuery(unittest.TestCase): @@ -28,10 +30,8 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def _make_client(self, connection=None): - if connection is None: - connection = _Connection() - return _Client(self._PROJECT, connection) + def _make_client(self): + return _Client(self._PROJECT) def test_ctor_defaults(self): client = self._make_client() @@ -319,8 +319,7 @@ def test_distinct_on_multiple_calls(self): def test_fetch_defaults_w_client_attr(self): from google.cloud.datastore.query import Iterator - connection = _Connection() - client = self._make_client(connection) + client = self._make_client() query = self._make_one(client) iterator = query.fetch() @@ -333,9 +332,8 @@ def test_fetch_defaults_w_client_attr(self): def test_fetch_w_explicit_client(self): from google.cloud.datastore.query import Iterator - connection = _Connection() - client = self._make_client(connection) - other_client = self._make_client(connection) + client = self._make_client() + other_client = self._make_client() query = self._make_one(client) iterator = query.fetch(limit=7, offset=8, client=other_client) self.assertIsInstance(iterator, Iterator) @@ -400,7 +398,7 @@ def test__build_protobuf_empty(self): from google.cloud.proto.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query - client = _Client(None, None) + client = _Client(None) query = Query(client) iterator = self._make_one(query, client) @@ -412,7 +410,7 @@ def test__build_protobuf_all_values(self): from google.cloud.proto.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query - client = _Client(None, None) + client = _Client(None) query = Query(client) limit = 15 offset = 9 @@ -489,17 +487,24 @@ def test__process_query_results_bad_enum(self): with self.assertRaises(ValueError): iterator._process_query_results(response_pb) - def test__next_page(self): + def _next_page_helper(self, txn_id=None): from google.cloud.iterator import Page + from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.proto.datastore.v1 import entity_pb2 from google.cloud.proto.datastore.v1 import query_pb2 from 
google.cloud.datastore.query import Query - connection = _Connection() more_enum = query_pb2.QueryResultBatch.NOT_FINISHED result = _make_query_response([], b'', more_enum, 0) - connection._results = [result] project = 'prujekt' - client = _Client(project, connection) + ds_api = _make_datastore_api(result) + if txn_id is None: + client = _Client(project, datastore_api=ds_api) + else: + transaction = mock.Mock(id=txn_id, spec=['id']) + client = _Client( + project, datastore_api=ds_api, transaction=transaction) + query = Query(client) iterator = self._make_one(query, client) @@ -507,25 +512,34 @@ def test__next_page(self): self.assertIsInstance(page, Page) self.assertIs(page._parent, iterator) - self.assertEqual(connection._called_with, [{ - 'query_pb': query_pb2.Query(), - 'project': project, - 'namespace': None, - 'transaction_id': None, - }]) + partition_id = entity_pb2.PartitionId(project_id=project) + if txn_id is None: + read_options = datastore_pb2.ReadOptions() + else: + read_options = datastore_pb2.ReadOptions(transaction=txn_id) + empty_query = query_pb2.Query() + ds_api.run_query.assert_called_once_with( + project, partition_id, read_options, query=empty_query) + + def test__next_page(self): + self._next_page_helper() + + def test__next_page_in_transaction(self): + txn_id = b'1xo1md\xe2\x98\x83' + self._next_page_helper(txn_id) def test__next_page_no_more(self): from google.cloud.datastore.query import Query - connection = _Connection() - client = _Client(None, connection) + ds_api = _make_datastore_api() + client = _Client(None, datastore_api=ds_api) query = Query(client) iterator = self._make_one(query, client) iterator._more_results = False page = iterator._next_page() self.assertIsNone(page) - self.assertEqual(connection._called_with, []) + ds_api.run_query.assert_not_called() class Test__item_to_entity(unittest.TestCase): @@ -674,32 +688,18 @@ def __init__(self, self.distinct_on = distinct_on -class _Connection(object): - - _called_with = None - _cursor = b'\x00' - _skipped = 0 - - def __init__(self): - self._results = [] - self._called_with = [] - - def run_query(self, **kw): - self._called_with.append(kw) - result, self._results = self._results[0], self._results[1:] - return result - - class _Client(object): - def __init__(self, project, connection, namespace=None): + def __init__(self, project, datastore_api=None, namespace=None, + transaction=None): self.project = project - self._connection = connection + self._datastore_api = datastore_api self.namespace = namespace + self._transaction = transaction @property def current_transaction(self): - pass + return self._transaction def _make_entity(kind, id_, project): @@ -729,3 +729,8 @@ def _make_query_response( ], ), ) + + +def _make_datastore_api(result=None): + run_query = mock.Mock(return_value=result, spec=[]) + return mock.Mock(run_query=run_query, spec=['run_query']) From 9b90a30f4124d3a16cf36d0b3e7cf3cab689da89 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 13 Mar 2017 12:50:21 -0700 Subject: [PATCH 100/611] Using GAPIC datastore object (and an HTTP equivalent) for lookup. This is the last method to be ported over so the Connection() base class as well as the _DatastoreAPIOverGRPC and _DatastoreAPIOverHttp helper classes have been totally removed. This commit represents a "working" implementation (i.e. system tests pass) but the unit tests have yet to be updated. 
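
In rough terms, the lookup call shape changes as follows (a minimal
sketch using a mocked client, as the unit tests do; HTTPDatastoreAPI is
the HTTP equivalent mentioned above):

    import mock

    from google.cloud.datastore._http import HTTPDatastoreAPI
    from google.cloud.proto.datastore.v1 import datastore_pb2

    # Callers now build a ReadOptions protobuf up front instead of
    # passing eventual=/transaction_id= keywords to Connection.lookup().
    client = mock.Mock(
        _http=None, _base_url='test.invalid', spec=['_http', '_base_url'])
    ds_api = HTTPDatastoreAPI(client)
    read_options = datastore_pb2.ReadOptions(
        read_consistency=datastore_pb2.ReadOptions.EVENTUAL)
    # ds_api.lookup(project, read_options, key_pbs) replaces the old
    # conn.lookup(project, key_pbs, eventual=True).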
--- .../google/cloud/datastore/_gax.py | 47 ------ .../google/cloud/datastore/_http.py | 147 +++--------------- .../google/cloud/datastore/batch.py | 2 +- .../google/cloud/datastore/client.py | 58 +++++-- .../google/cloud/datastore/query.py | 2 - .../google/cloud/datastore/transaction.py | 1 - 6 files changed, 65 insertions(+), 192 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index fac5770239d0..4f075330d3e5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -19,16 +19,13 @@ import sys from google.cloud.gapic.datastore.v1 import datastore_client -from google.cloud.proto.datastore.v1 import datastore_pb2_grpc from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from google.gax.utils import metrics from grpc import StatusCode import six -from google.cloud._helpers import make_insecure_stub from google.cloud._helpers import make_secure_channel -from google.cloud._helpers import make_secure_stub from google.cloud._http import DEFAULT_USER_AGENT from google.cloud import exceptions @@ -92,50 +89,6 @@ def _grpc_catch_rendezvous(): six.reraise(error_class, new_exc, sys.exc_info()[2]) -class _DatastoreAPIOverGRPC(object): - """Helper mapping datastore API methods. - - Makes requests to send / receive protobuf content over gRPC. - - Methods make bare API requests without any helpers for constructing - the requests or parsing the responses. - - :type connection: :class:`Connection` - :param connection: A connection object that contains helpful - information for making requests. - """ - - def __init__(self, connection): - parse_result = six.moves.urllib_parse.urlparse( - connection.api_base_url) - host = parse_result.hostname - if parse_result.scheme == 'https': - self._stub = make_secure_stub( - connection.credentials, DEFAULT_USER_AGENT, - datastore_pb2_grpc.DatastoreStub, host, - extra_options=_GRPC_EXTRA_OPTIONS) - else: - self._stub = make_insecure_stub( - datastore_pb2_grpc.DatastoreStub, host) - - def lookup(self, project, request_pb): - """Perform a ``lookup`` request. - - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. - - :type request_pb: :class:`.datastore_pb2.LookupRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.LookupResponse` - :returns: The returned protobuf response object. - """ - request_pb.project_id = project - with _grpc_catch_rendezvous(): - return self._stub.Lookup(request_pb) - - class GAPICDatastoreAPI(datastore_client.DatastoreClient): """An API object that sends proto-over-gRPC requests. 
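
With the hand-rolled stub wrapper gone, the gRPC path reduces to
constructing GAPICDatastoreAPI (a thin subclass of the generated
DatastoreClient) over a channel. A minimal sketch, assuming a channel
has already been built and using an illustrative helper name; the
keyword arguments mirror the assertions in test__gax.py later in this
series:

    from google.cloud.datastore import __version__
    from google.cloud.datastore._gax import GAPICDatastoreAPI

    def make_api(channel):
        # lib_name/lib_version are client-library metadata forwarded to
        # the generated GAPIC client.
        return GAPICDatastoreAPI(
            channel=channel, lib_name='gccl', lib_version=__version__)
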
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 910da9f0dbf6..0723a97a0de4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -14,22 +14,13 @@ """Connections to Google Cloud Datastore API servers.""" -import os - from google.rpc import status_pb2 from google.cloud import _http as connection_module -from google.cloud.environment_vars import DISABLE_GRPC from google.cloud import exceptions from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 from google.cloud.datastore import __version__ -try: - from google.cloud.datastore._gax import _DatastoreAPIOverGRPC - _HAVE_GRPC = True -except ImportError: # pragma: NO COVER - _DatastoreAPIOverGRPC = None - _HAVE_GRPC = False DATASTORE_API_HOST = 'datastore.googleapis.com' @@ -42,8 +33,6 @@ '/{project}:{method}') """A template for the URL of a particular API call.""" -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC _CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(__version__) @@ -148,121 +137,45 @@ def build_api_url(project, method, base_url): project=project, method=method) -class _DatastoreAPIOverHttp(object): - """Helper mapping datastore API methods. - - Makes requests to send / receive protobuf content over HTTP/1.1. +class HTTPDatastoreAPI(object): + """An API object that sends proto-over-HTTP requests. - Methods make bare API requests without any helpers for constructing - the requests or parsing the responses. + Intended to provide the same methods as the GAPIC ``DatastoreClient``. - :type connection: :class:`Connection` - :param connection: A connection object that contains helpful - information for making requests. + :type client: :class:`~google.cloud.datastore.client.Client` + :param client: The client that provides configuration. """ - def __init__(self, connection): - self.connection = connection + def __init__(self, client): + self.client = client - def lookup(self, project, request_pb): + def lookup(self, project, read_options, key_pbs): """Perform a ``lookup`` request. :type project: str :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`.datastore_pb2.LookupRequest` - :param request_pb: The request protobuf object. - - :rtype: :class:`.datastore_pb2.LookupResponse` - :returns: The returned protobuf response object. - """ - return _rpc(self.connection.http, project, 'lookup', - self.connection.api_base_url, - request_pb, _datastore_pb2.LookupResponse) - - -class Connection(connection_module.Connection): - """A connection to the Google Cloud Datastore via the Protobuf API. - - This class should understand only the basic types (and protobufs) - in method arguments, however it should be capable of returning advanced - types. - - :type client: :class:`~google.cloud.datastore.client.Client` - :param client: The client that owns the current connection. - """ - - def __init__(self, client): - super(Connection, self).__init__(client) - self.api_base_url = client._base_url - if _USE_GRPC: - self._datastore_api = _DatastoreAPIOverGRPC(self) - else: - self._datastore_api = _DatastoreAPIOverHttp(self) - - def lookup(self, project, key_pbs, - eventual=False, transaction_id=None): - """Lookup keys from a project in the Cloud Datastore. - - Maps the ``DatastoreService.Lookup`` protobuf RPC. 
- - This uses mostly protobufs - (:class:`.entity_pb2.Key` as input and :class:`.entity_pb2.Entity` - as output). It is used under the hood in - :meth:`Client.get() <.datastore.client.Client.get>`: - - .. code-block:: python - - >>> from google.cloud import datastore - >>> client = datastore.Client(project='project') - >>> key = client.key('MyKind', 1234) - >>> client.get(key) - [] - - Using a :class:`Connection` directly: - - .. code-block:: python - - >>> connection.lookup('project', [key.to_protobuf()]) - [] - - :type project: str - :param project: The project to look up the keys in. + :type read_options: :class:`.datastore_pb2.ReadOptions` + :param read_options: The options for this lookup. Contains a + either the transaction for the read or + ``STRONG`` or ``EVENTUAL`` read consistency. :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. - :type eventual: bool - :param eventual: If False (the default), request ``STRONG`` read - consistency. If True, request ``EVENTUAL`` read - consistency. - - :type transaction_id: str - :param transaction_id: If passed, make the request in the scope of - the given transaction. Incompatible with - ``eventual==True``. - :rtype: :class:`.datastore_pb2.LookupResponse` - :returns: The returned protobuf for the lookup request. + :returns: The returned protobuf response object. """ - lookup_request = _datastore_pb2.LookupRequest(keys=key_pbs) - _set_read_options(lookup_request, eventual, transaction_id) - return self._datastore_api.lookup(project, lookup_request) - - -class HTTPDatastoreAPI(object): - """An API object that sends proto-over-HTTP requests. - - Intended to provide the same methods as the GAPIC ``DatastoreClient``. - - :type client: :class:`~google.cloud.datastore.client.Client` - :param client: The client that provides configuration. - """ - - def __init__(self, client): - self.client = client + request_pb = _datastore_pb2.LookupRequest( + project_id=project, + read_options=read_options, + keys=key_pbs, + ) + return _rpc(self.client._http, project, 'lookup', + self.client._base_url, + request_pb, _datastore_pb2.LookupResponse) def run_query(self, project, partition_id, read_options, query=None, gql_query=None): @@ -390,21 +303,3 @@ def allocate_ids(self, project, key_pbs): return _rpc(self.client._http, project, 'allocateIds', self.client._base_url, request_pb, _datastore_pb2.AllocateIdsResponse) - - -def _set_read_options(request, eventual, transaction_id): - """Validate rules for read options, and assign to the request. - - Helper method for ``lookup()`` and ``run_query``. - - :raises: :class:`ValueError` if ``eventual`` is ``True`` and the - ``transaction_id`` is not ``None``. 
- """ - if eventual and (transaction_id is not None): - raise ValueError('eventual must be False when in a transaction') - - opts = request.read_options - if eventual: - opts.read_consistency = _datastore_pb2.ReadOptions.EVENTUAL - elif transaction_id: - opts.transaction = transaction_id diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index a5b80e432c9a..b20ba7047670 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -249,7 +249,7 @@ def _commit(self): self.project, mode, self._mutations, transaction=self._id) _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that - # :meth:`Connection.commit` will return keys that match (length and + # ``commit`` will return keys that match (length and # order) directly ``_partial_key_entities``. for new_key_pb, entity in zip(updated_keys, self._partial_key_entities): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 34f14735a592..2579fd85b097 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -15,6 +15,8 @@ import os +from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 + from google.cloud._helpers import _LocalStack from google.cloud._helpers import ( _determine_default_project as _base_default_project) @@ -23,7 +25,6 @@ from google.cloud.environment_vars import GCD_DATASET from google.cloud.environment_vars import GCD_HOST -from google.cloud.datastore._http import Connection from google.cloud.datastore._http import HTTPDatastoreAPI from google.cloud.datastore import helpers from google.cloud.datastore.batch import Batch @@ -78,15 +79,18 @@ def _determine_default_project(project=None): return project -def _extended_lookup(connection, project, key_pbs, +def _extended_lookup(datastore_api, project, key_pbs, missing=None, deferred=None, eventual=False, transaction_id=None): """Repeat lookup until all keys found (unless stop requested). Helper function for :meth:`Client.get_multi`. - :type connection: :class:`google.cloud.datastore._http.Connection` - :param connection: The connection used to connect to datastore. + :type datastore_api: + :class:`google.cloud.datastore._http.HTTPDatastoreAPI` + or :class:`google.cloud.datastore._gax.GAPICDatastoreAPI` + :param datastore_api: The datastore API object used to connect + to datastore. :type project: str :param project: The project to make the request for. @@ -127,15 +131,11 @@ def _extended_lookup(connection, project, key_pbs, results = [] loop_num = 0 + read_options = _get_read_options(eventual, transaction_id) while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 - - lookup_response = connection.lookup( - project=project, - key_pbs=key_pbs, - eventual=eventual, - transaction_id=transaction_id, - ) + lookup_response = datastore_api.lookup( + project, read_options, key_pbs) # Accumulate the new results. results.extend(result.entity for result in lookup_response.found) @@ -210,9 +210,6 @@ def __init__(self, project=None, namespace=None, self._base_url = 'http://' + host except KeyError: self._base_url = _DATASTORE_BASE_URL - # NOTE: Make sure all properties are set before passing to - # ``Connection`` (e.g. 
``_base_url``). - self._connection = Connection(self) @staticmethod def _determine_default(project): @@ -347,7 +344,7 @@ def get_multi(self, keys, missing=None, deferred=None, transaction=None): transaction = self.current_transaction entity_pbs = _extended_lookup( - connection=self._connection, + datastore_api=self._datastore_api, project=self.project, key_pbs=[k.to_protobuf() for k in keys], missing=missing, @@ -569,3 +566,34 @@ def do_something(entity): if 'namespace' not in kwargs: kwargs['namespace'] = self.namespace return Query(self, **kwargs) + + +def _get_read_options(eventual, transaction_id): + """Validate rules for read options, and assign to the request. + + Helper method for ``lookup()`` and ``run_query``. + + :type eventual: bool + :param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG`` + consistency should be used. + + :type transaction_id: bytes + :param transaction_id: A transaction identifier (may be null). + + :rtype: :class:`.datastore_pb2.ReadOptions` + :returns: The read options corresponding to the inputs. + :raises: :class:`ValueError` if ``eventual`` is ``True`` and the + ``transaction_id`` is not ``None``. + """ + if transaction_id is None: + if eventual: + return _datastore_pb2.ReadOptions( + read_consistency=_datastore_pb2.ReadOptions.EVENTUAL) + else: + return _datastore_pb2.ReadOptions() + else: + if eventual: + raise ValueError('eventual must be False when in a transaction') + else: + return _datastore_pb2.ReadOptions( + transaction=transaction_id) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 27189bdb12b2..ebbe72eed52a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -362,8 +362,6 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, :rtype: :class:`Iterator` :returns: The iterator for the query. - :raises: ValueError if ``connection`` is not passed and no implicit - default has been set. """ if client is None: client = self._client diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index c1cd6a01321a..00d4ac1d891b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -196,7 +196,6 @@ def rollback(self): This method has necessary side-effects: - - Sets the current connection's transaction reference to None. - Sets the current transaction's ID to None. """ try: From e257e367033ff3a03c701d9991137d54ec0779bc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 13 Mar 2017 14:05:41 -0700 Subject: [PATCH 101/611] Updating unit test for removal of datastore Connection. Also moved the Connection.lookup() method onto GAPIC and GAPIC-like HTTP API object. 
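
The read-option rules pinned down by the new Test__get_read_options
cases below, condensed into a runnable summary (a sketch against the
helper added in the previous commit):

    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.datastore.client import _get_read_options

    # No transaction: default (STRONG) consistency, or EVENTUAL on request.
    assert _get_read_options(False, None) == datastore_pb2.ReadOptions()
    assert _get_read_options(True, None) == datastore_pb2.ReadOptions(
        read_consistency=datastore_pb2.ReadOptions.EVENTUAL)
    # In a transaction, reads are scoped to it...
    assert _get_read_options(False, b'abc') == datastore_pb2.ReadOptions(
        transaction=b'abc')
    # ...and asking for EVENTUAL consistency there raises ValueError.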
--- .../unit_tests/test__gax.py | 105 +------- .../unit_tests/test__http.py | 228 ++++------------ .../unit_tests/test_client.py | 247 +++++++++++------- 3 files changed, 209 insertions(+), 371 deletions(-) diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index 433162542ea9..bb7dd6ac1773 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -16,7 +16,7 @@ import mock -from google.cloud.datastore._http import _HAVE_GRPC +from google.cloud.datastore.client import _HAVE_GRPC @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') @@ -112,95 +112,6 @@ def test_gax_error_not_mapped(self): self._fake_method(exc) -class Test_DatastoreAPIOverGRPC(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._gax import _DatastoreAPIOverGRPC - - return _DatastoreAPIOverGRPC - - def _make_one(self, stub, connection=None, secure=True): - if secure: - patch = mock.patch( - 'google.cloud.datastore._gax.make_secure_stub', - return_value=stub) - base_url = 'https://test.invalid' - else: - patch = mock.patch( - 'google.cloud.datastore._gax.make_insecure_stub', - return_value=stub) - base_url = 'http://test.invalid' - - if connection is None: - connection = mock.Mock( - credentials=object(), - api_base_url=base_url, - spec=['credentials', 'api_base_url'], - ) - - with patch as make_stub_mock: - api_obj = self._get_target_class()(connection) - return api_obj, make_stub_mock - - def test_constructor(self): - from google.cloud._http import DEFAULT_USER_AGENT - import google.cloud.datastore._gax as MUT - - host = 'test.invalid' - conn = mock.Mock( - credentials=object(), - api_base_url='https://' + host, - spec=['credentials', 'api_base_url'], - ) - - stub = _GRPCStub() - datastore_api, make_stub_mock = self._make_one( - stub, connection=conn) - - self.assertIs(datastore_api._stub, stub) - make_stub_mock.assert_called_once_with( - conn.credentials, - DEFAULT_USER_AGENT, - MUT.datastore_pb2_grpc.DatastoreStub, - host, - extra_options=MUT._GRPC_EXTRA_OPTIONS, - ) - - def test_constructor_insecure(self): - from google.cloud.proto.datastore.v1 import datastore_pb2_grpc - - host = 'test.invalid' - conn = mock.Mock( - credentials=object(), - api_base_url='http://' + host, - spec=['credentials', 'api_base_url'], - ) - - stub = _GRPCStub() - datastore_api, make_stub_mock = self._make_one( - stub, connection=conn, secure=False) - - self.assertIs(datastore_api._stub, stub) - make_stub_mock.assert_called_once_with( - datastore_pb2_grpc.DatastoreStub, - host, - ) - - def test_lookup(self): - return_val = object() - stub = _GRPCStub(return_val) - datastore_api, _ = self._make_one(stub=stub) - - request_pb = mock.Mock(project_id=None, spec=['project_id']) - project = 'PROJECT' - result = datastore_api.lookup(project, request_pb) - self.assertIs(result, return_val) - self.assertEqual(request_pb.project_id, project) - self.assertEqual(stub.method_calls, - [(request_pb, 'Lookup')]) - - @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') class TestGAPICDatastoreAPI(unittest.TestCase): @@ -263,17 +174,3 @@ def test_it(self, make_chan, mock_klass): mock_klass.assert_called_once_with( channel=mock.sentinel.channel, lib_name='gccl', lib_version=__version__) - - -class _GRPCStub(object): - - def __init__(self, return_val=None): - self.return_val = return_val - self.method_calls = [] - - def _method(self, request_pb, name): - self.method_calls.append((request_pb, 
name)) - return self.return_val - - def Lookup(self, request_pb): - return self._method(request_pb, 'Lookup') diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/unit_tests/test__http.py index 748eba93b7f3..db364ec4dd61 100644 --- a/packages/google-cloud-datastore/unit_tests/test__http.py +++ b/packages/google-cloud-datastore/unit_tests/test__http.py @@ -114,99 +114,29 @@ def test_it(self): base_url) -class Test_DatastoreAPIOverHttp(unittest.TestCase): +class TestHTTPDatastoreAPI(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.datastore._http import _DatastoreAPIOverHttp - - return _DatastoreAPIOverHttp - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor(self): - connection = object() - ds_api = self._make_one(connection) - self.assertIs(ds_api.connection, connection) - - def test_lookup(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - - connection = mock.Mock( - api_base_url='test.invalid', spec=['http', 'api_base_url']) - ds_api = self._make_one(connection) - - project = 'project' - request_pb = object() - - patch = mock.patch( - 'google.cloud.datastore._http._rpc', - return_value=mock.sentinel.looked_up) - with patch as mock_rpc: - result = ds_api.lookup(project, request_pb) - self.assertIs(result, mock.sentinel.looked_up) - - mock_rpc.assert_called_once_with( - connection.http, project, 'lookup', - connection.api_base_url, - request_pb, datastore_pb2.LookupResponse) + from google.cloud.datastore._http import HTTPDatastoreAPI + return HTTPDatastoreAPI -class TestConnection(unittest.TestCase): + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import Connection - - return Connection - - def _make_one(self, client, use_grpc=False): - with mock.patch('google.cloud.datastore._http._USE_GRPC', - new=use_grpc): - return self._get_target_class()(client) + def _make_query_pb(kind): + from google.cloud.proto.datastore.v1 import query_pb2 - def test_inherited_url(self): - client = mock.Mock(_base_url='test.invalid', spec=['_base_url']) - conn = self._make_one(client) - self.assertEqual(conn.api_base_url, client._base_url) + return query_pb2.Query( + kind=[query_pb2.KindExpression(name=kind)], + ) def test_constructor(self): - client = mock.Mock(spec=['_base_url']) - conn = self._make_one(client) - self.assertIs(conn._client, client) - - def test_constructor_without_grpc(self): - connections = [] - client = mock.Mock(spec=['_base_url']) - return_val = object() - - def mock_api(connection): - connections.append(connection) - return return_val - - patch = mock.patch( - 'google.cloud.datastore._http._DatastoreAPIOverHttp', - new=mock_api) - with patch: - conn = self._make_one(client, use_grpc=False) - - self.assertIs(conn._client, client) - self.assertIs(conn._datastore_api, return_val) - self.assertEqual(connections, [conn]) - - def test_constructor_with_grpc(self): - client = mock.Mock(spec=['_base_url']) - - patch = mock.patch( - 'google.cloud.datastore._http._DatastoreAPIOverGRPC', - return_value=mock.sentinel.ds_api) - with patch as mock_klass: - conn = self._make_one(client, use_grpc=True) - mock_klass.assert_called_once_with(conn) - - self.assertIs(conn._client, client) - self.assertIs(conn._datastore_api, mock.sentinel.ds_api) + client = object() + ds_api = self._make_one(client) + self.assertIs(ds_api.client, client) 
def test_lookup_single_key_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -214,6 +144,7 @@ def test_lookup_single_key_empty_response(self): project = 'PROJECT' key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() + read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -221,12 +152,12 @@ def test_lookup_single_key_empty_response(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.lookup(project, [key_pb]) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb]) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -234,9 +165,8 @@ def test_lookup_single_key_empty_response(self): _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 1) - self.assertEqual(key_pb, keys[0]) + self.assertEqual(list(request.keys), [key_pb]) + self.assertEqual(request.read_options, read_options) def test_lookup_single_key_empty_response_w_eventual(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -244,6 +174,8 @@ def test_lookup_single_key_empty_response_w_eventual(self): project = 'PROJECT' key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() + read_options = datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.EVENTUAL) # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -251,12 +183,12 @@ def test_lookup_single_key_empty_response_w_eventual(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.lookup(project, [key_pb], eventual=True) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb]) # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -264,22 +196,8 @@ def test_lookup_single_key_empty_response_w_eventual(self): _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 1) - self.assertEqual(key_pb, keys[0]) - self.assertEqual(request.read_options.read_consistency, - datastore_pb2.ReadOptions.EVENTUAL) - self.assertEqual(request.read_options.transaction, b'') - - def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): - project = 'PROJECT' - transaction = b'TRANSACTION' - key_pb = _make_key_pb(project) - - client = mock.Mock(spec=['_base_url']) - conn = self._make_one(client) - self.assertRaises(ValueError, conn.lookup, project, [key_pb], - eventual=True, transaction_id=transaction) + self.assertEqual(list(request.keys), [key_pb]) + self.assertEqual(request.read_options, read_options) def test_lookup_single_key_empty_response_w_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -288,6 +206,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): transaction = b'TRANSACTION' key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() + read_options = datastore_pb2.ReadOptions(transaction=transaction) # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -295,12 +214,12 @@ def test_lookup_single_key_empty_response_w_transaction(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.lookup(project, [key_pb], transaction_id=transaction) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb]) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -308,10 +227,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 1) - self.assertEqual(key_pb, keys[0]) - self.assertEqual(request.read_options.transaction, transaction) + self.assertEqual(list(request.keys), [key_pb]) + self.assertEqual(request.read_options, read_options) def test_lookup_single_key_nonempty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -323,6 +240,7 @@ def test_lookup_single_key_nonempty_response(self): entity = entity_pb2.Entity() entity.key.CopyFrom(key_pb) rsp_pb.found.add(entity=entity) + read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -330,12 +248,12 @@ def test_lookup_single_key_nonempty_response(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. 
- conn = self._make_one(client) - response = conn.lookup(project, [key_pb]) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb]) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 1) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -346,9 +264,8 @@ def test_lookup_single_key_nonempty_response(self): _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 1) - self.assertEqual(key_pb, keys[0]) + self.assertEqual(list(request.keys), [key_pb]) + self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_empty_response(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -357,6 +274,7 @@ def test_lookup_multiple_keys_empty_response(self): key_pb1 = _make_key_pb(project) key_pb2 = _make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() + read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -364,12 +282,12 @@ def test_lookup_multiple_keys_empty_response(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.lookup(project, [key_pb1, key_pb2]) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb1, key_pb2]) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) @@ -377,10 +295,8 @@ def test_lookup_multiple_keys_empty_response(self): _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 2) - self.assertEqual(key_pb1, keys[0]) - self.assertEqual(key_pb2, keys[1]) + self.assertEqual(list(request.keys), [key_pb1, key_pb2]) + self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_missing(self): from google.cloud.proto.datastore.v1 import datastore_pb2 @@ -393,6 +309,7 @@ def test_lookup_multiple_keys_w_missing(self): er_1.entity.key.CopyFrom(key_pb1) er_2 = rsp_pb.missing.add() er_2.entity.key.CopyFrom(key_pb2) + read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -400,12 +317,12 @@ def test_lookup_multiple_keys_w_missing(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.lookup(project, [key_pb1, key_pb2]) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb1, key_pb2]) # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.deferred), 0) missing_keys = [result.entity.key for result in response.missing] @@ -414,14 +331,11 @@ def test_lookup_multiple_keys_w_missing(self): _verify_protobuf_call(self, cw, uri) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 2) - self.assertEqual(key_pb1, keys[0]) - self.assertEqual(key_pb2, keys[1]) + self.assertEqual(list(request.keys), [key_pb1, key_pb2]) + self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_deferred(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud import _http as connection_module from google.cloud.datastore._http import _CLIENT_INFO @@ -431,6 +345,7 @@ def test_lookup_multiple_keys_w_deferred(self): rsp_pb = datastore_pb2.LookupResponse() rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) + read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = Http({'status': '200'}, rsp_pb.SerializeToString()) @@ -438,12 +353,12 @@ def test_lookup_multiple_keys_w_deferred(self): _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) # Make request. - conn = self._make_one(client) - response = conn.lookup(project, [key_pb1, key_pb2]) + ds_api = self._make_one(client) + response = ds_api.lookup(project, read_options, [key_pb1, key_pb2]) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(conn.api_base_url, project, 'lookup') + uri = _build_expected_url(client._base_url, project, 'lookup') self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(list(response.deferred), [key_pb1, key_pb2]) @@ -453,42 +368,15 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(cw['method'], 'POST') expected_headers = { 'Content-Type': 'application/x-protobuf', - 'User-Agent': conn.USER_AGENT, - 'Content-Length': '48', + 'User-Agent': connection_module.DEFAULT_USER_AGENT, + 'Content-Length': str(len(cw['body'])), connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } self.assertEqual(cw['headers'], expected_headers) request = datastore_pb2.LookupRequest() request.ParseFromString(cw['body']) - keys = list(request.keys) - self.assertEqual(len(keys), 2) - self.assertEqual(key_pb1, keys[0]) - self.assertEqual(key_pb2, keys[1]) - - -class TestHTTPDatastoreAPI(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import HTTPDatastoreAPI - - return HTTPDatastoreAPI - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @staticmethod - def _make_query_pb(kind): - from google.cloud.proto.datastore.v1 import query_pb2 - - return query_pb2.Query( - kind=[query_pb2.KindExpression(name=kind)], - ) - - def test_constructor(self): - client = object() - ds_api = self._make_one(client) - self.assertIs(ds_api.client, client) + self.assertEqual(list(request.keys), [key_pb1, key_pb2]) + self.assertEqual(request.read_options, read_options) def test_run_query_w_eventual_no_transaction(self): from google.cloud.proto.datastore.v1 import datastore_pb2 diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py 
b/packages/google-cloud-datastore/unit_tests/test_client.py index fdc7394af419..8f4197c51419 100644 --- a/packages/google-cloud-datastore/unit_tests/test_client.py +++ b/packages/google-cloud-datastore/unit_tests/test_client.py @@ -120,17 +120,6 @@ class TestClient(unittest.TestCase): PROJECT = 'PROJECT' - def setUp(self): - from google.cloud.datastore import client as MUT - - self.original_cnxn_class = MUT.Connection - MUT.Connection = _MockConnection - - def tearDown(self): - from google.cloud.datastore import client as MUT - - MUT.Connection = self.original_cnxn_class - @staticmethod def _get_target_class(): from google.cloud.datastore.client import Client @@ -179,7 +168,6 @@ def fallback_mock(project): self.assertEqual(client.project, other) self.assertIsNone(client.namespace) - self.assertIsInstance(client._connection, _MockConnection) self.assertIs(client._credentials, creds) self.assertIsNone(client._http_internal) self.assertEqual(client._base_url, _DATASTORE_BASE_URL) @@ -201,7 +189,6 @@ def test_constructor_w_explicit_inputs(self): http=http) self.assertEqual(client.project, other) self.assertEqual(client.namespace, namespace) - self.assertIsInstance(client._connection, _MockConnection) self.assertIs(client._credentials, creds) self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) @@ -355,17 +342,25 @@ def test_get_multi_no_keys(self): self.assertEqual(results, []) def test_get_multi_miss(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.key import Key creds = _make_credentials() client = self._make_one(credentials=creds) - client._connection._add_lookup_result() + ds_api = _make_datastore_api() + client._datastore_api_internal = ds_api + key = Key('Kind', 1234, project=self.PROJECT) results = client.get_multi([key]) self.assertEqual(results, []) + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + self.PROJECT, read_options, [key.to_protobuf()]) + def test_get_multi_miss_w_missing(self): from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.key import Key KIND = 'Kind' @@ -381,14 +376,21 @@ def test_get_multi_miss_w_missing(self): creds = _make_credentials() client = self._make_one(credentials=creds) # Set missing entity on mock connection. - client._connection._add_lookup_result(missing=[missed]) + lookup_response = _make_lookup_response(missing=[missed]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api key = Key(KIND, ID, project=self.PROJECT) missing = [] entities = client.get_multi([key], missing=missing) self.assertEqual(entities, []) - self.assertEqual([missed.key.to_protobuf() for missed in missing], - [key.to_protobuf()]) + key_pb = key.to_protobuf() + self.assertEqual( + [missed.key.to_protobuf() for missed in missing], [key_pb]) + + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + self.PROJECT, read_options, [key_pb]) def test_get_multi_w_missing_non_empty(self): from google.cloud.datastore.key import Key @@ -413,22 +415,31 @@ def test_get_multi_w_deferred_non_empty(self): [key], deferred=deferred) def test_get_multi_miss_w_deferred(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.key import Key key = Key('Kind', 1234, project=self.PROJECT) + key_pb = key.to_protobuf() # Set deferred entity on mock connection. 
creds = _make_credentials() client = self._make_one(credentials=creds) - client._connection._add_lookup_result(deferred=[key.to_protobuf()]) + lookup_response = _make_lookup_response(deferred=[key_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api deferred = [] entities = client.get_multi([key], deferred=deferred) self.assertEqual(entities, []) - self.assertEqual([def_key.to_protobuf() for def_key in deferred], - [key.to_protobuf()]) + self.assertEqual( + [def_key.to_protobuf() for def_key in deferred], [key_pb]) + + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + self.PROJECT, read_options, [key_pb]) def test_get_multi_w_deferred_from_backend_but_not_passed(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.proto.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -445,9 +456,15 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): creds = _make_credentials() client = self._make_one(credentials=creds) - # mock up two separate requests - client._connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) - client._connection._add_lookup_result([entity2_pb]) + # Mock up two separate requests. Using an iterable as side_effect + # allows multiple return values. + lookup_response1 = _make_lookup_response( + results=[entity1_pb], deferred=[key2_pb]) + lookup_response2 = _make_lookup_response(results=[entity2_pb]) + ds_api = _make_datastore_api() + ds_api.lookup = mock.Mock( + side_effect=[lookup_response1, lookup_response2], spec=[]) + client._datastore_api_internal = ds_api missing = [] found = client.get_multi([key1, key2], missing=missing) @@ -463,102 +480,104 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): self.assertEqual(found[1].key.path, key2.path) self.assertEqual(found[1].key.project, key2.project) - cw = client._connection._lookup_cw - self.assertEqual(len(cw), 2) - - ds_id, k_pbs, eventual, tid = cw[0] - self.assertEqual(ds_id, self.PROJECT) - self.assertEqual(len(k_pbs), 2) - self.assertEqual(key1_pb, k_pbs[0]) - self.assertEqual(key2_pb, k_pbs[1]) - self.assertFalse(eventual) - self.assertIsNone(tid) - - ds_id, k_pbs, eventual, tid = cw[1] - self.assertEqual(ds_id, self.PROJECT) - self.assertEqual(len(k_pbs), 1) - self.assertEqual(key2_pb, k_pbs[0]) - self.assertFalse(eventual) - self.assertIsNone(tid) + self.assertEqual(ds_api.lookup.call_count, 2) + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_any_call( + self.PROJECT, read_options, [key2_pb]) + ds_api.lookup.assert_any_call( + self.PROJECT, read_options, [key1_pb, key2_pb]) def test_get_multi_hit(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.key import Key - KIND = 'Kind' - ID = 1234 - PATH = [{'kind': KIND, 'id': ID}] + kind = 'Kind' + id_ = 1234 + path = [{'kind': kind, 'id': id_}] # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + entity_pb = _make_entity_pb(self.PROJECT, kind, id_, 'foo', 'Foo') # Make a connection to return the entity pb. 
creds = _make_credentials() client = self._make_one(credentials=creds) - client._connection._add_lookup_result([entity_pb]) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api - key = Key(KIND, ID, project=self.PROJECT) + key = Key(kind, id_, project=self.PROJECT) result, = client.get_multi([key]) new_key = result.key # Check the returned value is as expected. self.assertIsNot(new_key, key) self.assertEqual(new_key.project, self.PROJECT) - self.assertEqual(new_key.path, PATH) + self.assertEqual(new_key.path, path) self.assertEqual(list(result), ['foo']) self.assertEqual(result['foo'], 'Foo') + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + self.PROJECT, read_options, [key.to_protobuf()]) + def test_get_multi_hit_w_transaction(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.key import Key - TXN_ID = '123' - KIND = 'Kind' - ID = 1234 - PATH = [{'kind': KIND, 'id': ID}] + txn_id = b'123' + kind = 'Kind' + id_ = 1234 + path = [{'kind': kind, 'id': id_}] # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + entity_pb = _make_entity_pb(self.PROJECT, kind, id_, 'foo', 'Foo') # Make a connection to return the entity pb. creds = _make_credentials() client = self._make_one(credentials=creds) - client._connection._add_lookup_result([entity_pb]) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api - key = Key(KIND, ID, project=self.PROJECT) + key = Key(kind, id_, project=self.PROJECT) txn = client.transaction() - txn._id = TXN_ID + txn._id = txn_id result, = client.get_multi([key], transaction=txn) new_key = result.key # Check the returned value is as expected. self.assertIsNot(new_key, key) self.assertEqual(new_key.project, self.PROJECT) - self.assertEqual(new_key.path, PATH) + self.assertEqual(new_key.path, path) self.assertEqual(list(result), ['foo']) self.assertEqual(result['foo'], 'Foo') - cw = client._connection._lookup_cw - self.assertEqual(len(cw), 1) - _, _, _, transaction_id = cw[0] - self.assertEqual(transaction_id, TXN_ID) + read_options = datastore_pb2.ReadOptions(transaction=txn_id) + ds_api.lookup.assert_called_once_with( + self.PROJECT, read_options, [key.to_protobuf()]) def test_get_multi_hit_multiple_keys_same_project(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.datastore.key import Key - KIND = 'Kind' - ID1 = 1234 - ID2 = 2345 + kind = 'Kind' + id1 = 1234 + id2 = 2345 # Make a found entity pb to be returned from mock backend. - entity_pb1 = _make_entity_pb(self.PROJECT, KIND, ID1) - entity_pb2 = _make_entity_pb(self.PROJECT, KIND, ID2) + entity_pb1 = _make_entity_pb(self.PROJECT, kind, id1) + entity_pb2 = _make_entity_pb(self.PROJECT, kind, id2) # Make a connection to return the entity pbs. 
creds = _make_credentials() client = self._make_one(credentials=creds) - client._connection._add_lookup_result([entity_pb1, entity_pb2]) + lookup_response = _make_lookup_response( + results=[entity_pb1, entity_pb2]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api - key1 = Key(KIND, ID1, project=self.PROJECT) - key2 = Key(KIND, ID2, project=self.PROJECT) + key1 = Key(kind, id1, project=self.PROJECT) + key2 = Key(kind, id2, project=self.PROJECT) retrieved1, retrieved2 = client.get_multi([key1, key2]) # Check values match. @@ -567,6 +586,11 @@ def test_get_multi_hit_multiple_keys_same_project(self): self.assertEqual(retrieved2.key.path, key2.path) self.assertEqual(dict(retrieved2), {}) + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + self.PROJECT, read_options, + [key1.to_protobuf(), key2.to_protobuf()]) + def test_get_multi_hit_multiple_keys_different_project(self): from google.cloud.datastore.key import Key @@ -588,18 +612,20 @@ def test_get_multi_hit_multiple_keys_different_project(self): def test_get_multi_max_loops(self): from google.cloud.datastore.key import Key - KIND = 'Kind' - ID = 1234 + kind = 'Kind' + id_ = 1234 # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + entity_pb = _make_entity_pb(self.PROJECT, kind, id_, 'foo', 'Foo') # Make a connection to return the entity pb. creds = _make_credentials() client = self._make_one(credentials=creds) - client._connection._add_lookup_result([entity_pb]) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api - key = Key(KIND, ID, project=self.PROJECT) + key = Key(kind, id_, project=self.PROJECT) deferred = [] missing = [] @@ -614,6 +640,7 @@ def test_get_multi_max_loops(self): self.assertEqual(result, []) self.assertEqual(missing, []) self.assertEqual(deferred, []) + ds_api.lookup.assert_not_called() def test_put(self): _called_with = [] @@ -987,32 +1014,39 @@ def test_query_w_namespace_collision(self): client, project=self.PROJECT, namespace=namespace2, kind=kind) -class _MockConnection(object): +class Test__get_read_options(unittest.TestCase): + + def _call_fut(self, eventual, transaction_id): + from google.cloud.datastore.client import _get_read_options + + return _get_read_options(eventual, transaction_id) + + def test_eventual_w_transaction(self): + with self.assertRaises(ValueError): + self._call_fut(True, b'123') + + def test_eventual_wo_transaction(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 - def __init__(self, credentials=None, http=None): - self.credentials = credentials - self.http = http - self._lookup_cw = [] - self._lookup = [] + read_options = self._call_fut(True, None) + expected = datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.EVENTUAL) + self.assertEqual(read_options, expected) - def _add_lookup_result(self, results=(), missing=(), deferred=()): - self._lookup.append((list(results), list(missing), list(deferred))) + def test_default_w_transaction(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 - def lookup(self, project, key_pbs, eventual=False, transaction_id=None): - self._lookup_cw.append((project, key_pbs, eventual, transaction_id)) - triple, self._lookup = self._lookup[0], self._lookup[1:] - results, missing, deferred = triple + txn_id = b'123abc-easy-as' + 
read_options = self._call_fut(False, txn_id) + expected = datastore_pb2.ReadOptions(transaction=txn_id) + self.assertEqual(read_options, expected) - entity_results_found = [ - mock.Mock(entity=result, spec=['entity']) for result in results] - entity_results_missing = [ - mock.Mock(entity=missing_entity, spec=['entity']) - for missing_entity in missing] - return mock.Mock( - found=entity_results_found, - missing=entity_results_missing, - deferred=deferred, - spec=['found', 'missing', 'deferred']) + def test_default_wo_transaction(self): + from google.cloud.proto.datastore.v1 import datastore_pb2 + + read_options = self._call_fut(False, None) + expected = datastore_pb2.ReadOptions() + self.assertEqual(read_options, expected) class _NoCommitBatch(object): @@ -1139,7 +1173,26 @@ def _make_commit_response(*keys): return datastore_pb2.CommitResponse(mutation_results=mutation_results) -def _make_datastore_api(*keys): +def _make_lookup_response(results=(), missing=(), deferred=()): + entity_results_found = [ + mock.Mock(entity=result, spec=['entity']) for result in results] + entity_results_missing = [ + mock.Mock(entity=missing_entity, spec=['entity']) + for missing_entity in missing] + return mock.Mock( + found=entity_results_found, + missing=entity_results_missing, + deferred=deferred, + spec=['found', 'missing', 'deferred']) + + +def _make_datastore_api(*keys, **kwargs): commit_method = mock.Mock( return_value=_make_commit_response(*keys), spec=[]) - return mock.Mock(commit=commit_method, spec=['commit']) + lookup_response = kwargs.pop( + 'lookup_response', _make_lookup_response()) + lookup_method = mock.Mock( + return_value=lookup_response, spec=[]) + return mock.Mock( + commit=commit_method, lookup=lookup_method, + spec=['commit', 'lookup']) From 1a04344b677896aa1aa7c0bbf6a7d52e70e2256b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 15 Mar 2017 14:19:49 -0700 Subject: [PATCH 102/611] Adding (back) support for datastore emulator. --- .../google/cloud/datastore/_gax.py | 13 ++++++-- .../unit_tests/test__gax.py | 33 ++++++++++++++++--- 2 files changed, 39 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index 4f075330d3e5..1d1f96a432e4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -22,6 +22,7 @@ from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from google.gax.utils import metrics +from grpc import insecure_channel from grpc import StatusCode import six @@ -131,8 +132,14 @@ def make_datastore_api(client): :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` :returns: A datastore API instance with the proper credentials. 
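+ + The channel is secure (authorized with the client's credentials) when ``client._base_url`` uses ``https``; otherwise an insecure channel is created, which is what the local emulator expects.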
""" - channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, - datastore_client.DatastoreClient.SERVICE_ADDRESS) + parse_result = six.moves.urllib_parse.urlparse( + client._base_url) + host = parse_result.netloc + if parse_result.scheme == 'https': + channel = make_secure_channel( + client._credentials, DEFAULT_USER_AGENT, host) + else: + channel = insecure_channel(host) + return GAPICDatastoreAPI( channel=channel, lib_name='gccl', lib_version=__version__) diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index bb7dd6ac1773..8ab1bcc6d1ac 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -158,19 +158,44 @@ def _call_fut(self, client): return_value=mock.sentinel.ds_client) @mock.patch('google.cloud.datastore._gax.make_secure_channel', return_value=mock.sentinel.channel) - def test_it(self, make_chan, mock_klass): + def test_live_api(self, make_chan, mock_klass): from google.cloud.gapic.datastore.v1 import datastore_client from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.datastore import __version__ + host = datastore_client.DatastoreClient.SERVICE_ADDRESS + base_url = 'https://' + host client = mock.Mock( - _credentials=mock.sentinel.credentials, spec=['_credentials']) + _base_url=base_url, + _credentials=mock.sentinel.credentials, + spec=['_base_url', '_credentials']) ds_api = self._call_fut(client) self.assertIs(ds_api, mock.sentinel.ds_client) make_chan.assert_called_once_with( - mock.sentinel.credentials, DEFAULT_USER_AGENT, - datastore_client.DatastoreClient.SERVICE_ADDRESS) + mock.sentinel.credentials, DEFAULT_USER_AGENT, host) + mock_klass.assert_called_once_with( + channel=mock.sentinel.channel, lib_name='gccl', + lib_version=__version__) + + @mock.patch( + 'google.cloud.datastore._gax.GAPICDatastoreAPI', + return_value=mock.sentinel.ds_client) + @mock.patch('google.cloud.datastore._gax.insecure_channel', + return_value=mock.sentinel.channel) + def test_emulator(self, make_chan, mock_klass): + from google.cloud.datastore import __version__ + + host = 'localhost:8901' + base_url = 'http://' + host + client = mock.Mock( + _base_url=base_url, + _credentials=mock.sentinel.credentials, + spec=['_base_url', '_credentials']) + ds_api = self._call_fut(client) + self.assertIs(ds_api, mock.sentinel.ds_client) + + make_chan.assert_called_once_with(host) mock_klass.assert_called_once_with( channel=mock.sentinel.channel, lib_name='gccl', lib_version=__version__) From 4bab27752aaffc84e0a3e8a87664386cfb8b16c5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 17 Mar 2017 11:14:35 -0700 Subject: [PATCH 103/611] Restoring datastore Gax exception re-mapping. Fixes #2746. --- .../google/cloud/datastore/_gax.py | 97 ++++++++++++++++ .../unit_tests/test__gax.py | 107 ++++++++++++++++++ 2 files changed, 204 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index 1d1f96a432e4..d9580e92fb2b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -103,6 +103,64 @@ class GAPICDatastoreAPI(datastore_client.DatastoreClient): :param kwargs: Keyword arguments to pass to constructor. """ + def lookup(self, *args, **kwargs): + """Perform a ``lookup`` request. 
+ + A light wrapper around the base method from the parent class. + Intended to provide exception re-mapping (from GaxError to our + native errors). + + :type args: tuple + :param args: Positional arguments to pass to base method. + + :type kwargs: dict + :param kwargs: Keyword arguments to pass to base method. + + :rtype: :class:`.datastore_pb2.LookupResponse` + :returns: The returned protobuf response object. + """ + with _grpc_catch_rendezvous(): + return super(GAPICDatastoreAPI, self).lookup(*args, **kwargs) + + def run_query(self, *args, **kwargs): + """Perform a ``runQuery`` request. + + A light wrapper around the base method from the parent class. + Intended to provide exception re-mapping (from GaxError to our + native errors). + + :type args: tuple + :param args: Positional arguments to pass to base method. + + :type kwargs: dict + :param kwargs: Keyword arguments to pass to base method. + + :rtype: :class:`.datastore_pb2.RunQueryResponse` + :returns: The returned protobuf response object. + """ + with _grpc_catch_rendezvous(): + return super(GAPICDatastoreAPI, self).run_query(*args, **kwargs) + + def begin_transaction(self, *args, **kwargs): + """Perform a ``beginTransaction`` request. + + A light wrapper around the base method from the parent class. + Intended to provide exception re-mapping (from GaxError to our + native errors). + + :type args: tuple + :param args: Positional arguments to pass to base method. + + :type kwargs: dict + :param kwargs: Keyword arguments to pass to base method. + + :rtype: :class:`.datastore_pb2.BeginTransactionResponse` + :returns: The returned protobuf response object. + """ + with _grpc_catch_rendezvous(): + return super(GAPICDatastoreAPI, self).begin_transaction( + *args, **kwargs) + def commit(self, *args, **kwargs): """Perform a ``commit`` request. @@ -122,6 +180,45 @@ def commit(self, *args, **kwargs): with _grpc_catch_rendezvous(): return super(GAPICDatastoreAPI, self).commit(*args, **kwargs) + def rollback(self, *args, **kwargs): + """Perform a ``rollback`` request. + + A light wrapper around the base method from the parent class. + Intended to provide exception re-mapping (from GaxError to our + native errors). + + :type args: tuple + :param args: Positional arguments to pass to base method. + + :type kwargs: dict + :param kwargs: Keyword arguments to pass to base method. + + :rtype: :class:`.datastore_pb2.RollbackResponse` + :returns: The returned protobuf response object. + """ + with _grpc_catch_rendezvous(): + return super(GAPICDatastoreAPI, self).rollback(*args, **kwargs) + + def allocate_ids(self, *args, **kwargs): + """Perform an ``allocateIds`` request. + + A light wrapper around the base method from the parent class. + Intended to provide exception re-mapping (from GaxError to our + native errors). + + :type args: tuple + :param args: Positional arguments to pass to base method. + + :type kwargs: dict + :param kwargs: Keyword arguments to pass to base method. + + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` + :returns: The returned protobuf response object. + """ + with _grpc_catch_rendezvous(): + return super(GAPICDatastoreAPI, self).allocate_ids( + *args, **kwargs) + def make_datastore_api(client): """Create an instance of the GAPIC Datastore API.
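All six wrappers above share one mechanism: run the parent-class call inside the ``_grpc_catch_rendezvous`` context manager so that recognized gRPC status codes resurface as ``google.cloud`` exceptions. Stripped to its shape, the pattern looks roughly like this (a simplified sketch, not this module's exact code; the name ``_remap_errors`` and the assumption that the caught exception exposes ``code()`` and ``details()`` are illustrative):

    import contextlib
    import sys

    import six

    @contextlib.contextmanager
    def _remap_errors(mapping, exc_type):
        # Translate recognized errors; re-raise everything else untouched.
        try:
            yield
        except exc_type as exc:
            error_class = mapping.get(exc.code())
            if error_class is None:
                raise  # Unknown status code: propagate unchanged.
            new_exc = error_class(exc.details())
            # Re-raise with the original traceback (works on Python 2 and 3).
            six.reraise(error_class, new_exc, sys.exc_info()[2])

Each wrapper then reduces to ``with _remap_errors(...): return super(...).method(*args, **kwargs)``.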
diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index 8ab1bcc6d1ac..39e47fb8b068 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -124,6 +124,69 @@ def _get_target_class(): def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) + def test_lookup(self): + from google.cloud.gapic.datastore.v1 import datastore_client + + patch1 = mock.patch.object( + datastore_client.DatastoreClient, '__init__', + return_value=None) + patch2 = mock.patch.object(datastore_client.DatastoreClient, 'lookup') + patch3 = mock.patch( + 'google.cloud.datastore._gax._grpc_catch_rendezvous') + + with patch1 as mock_constructor: + ds_api = self._make_one() + mock_constructor.assert_called_once_with() + with patch2 as mock_lookup: + with patch3 as mock_catch_rendezvous: + mock_catch_rendezvous.assert_not_called() + ds_api.lookup(None, True, bb='cc') + mock_lookup.assert_called_once_with(None, True, bb='cc') + mock_catch_rendezvous.assert_called_once_with() + + def test_run_query(self): + from google.cloud.gapic.datastore.v1 import datastore_client + + patch1 = mock.patch.object( + datastore_client.DatastoreClient, '__init__', + return_value=None) + patch2 = mock.patch.object( + datastore_client.DatastoreClient, 'run_query') + patch3 = mock.patch( + 'google.cloud.datastore._gax._grpc_catch_rendezvous') + + with patch1 as mock_constructor: + ds_api = self._make_one() + mock_constructor.assert_called_once_with() + with patch2 as mock_run_query: + with patch3 as mock_catch_rendezvous: + mock_catch_rendezvous.assert_not_called() + ds_api.run_query('47a', none=None) + mock_run_query.assert_called_once_with('47a', none=None) + mock_catch_rendezvous.assert_called_once_with() + + def test_begin_transaction(self): + from google.cloud.gapic.datastore.v1 import datastore_client + + patch1 = mock.patch.object( + datastore_client.DatastoreClient, '__init__', + return_value=None) + patch2 = mock.patch.object( + datastore_client.DatastoreClient, 'begin_transaction') + patch3 = mock.patch( + 'google.cloud.datastore._gax._grpc_catch_rendezvous') + + with patch1 as mock_constructor: + ds_api = self._make_one() + mock_constructor.assert_called_once_with() + with patch2 as mock_begin_transaction: + with patch3 as mock_catch_rendezvous: + mock_catch_rendezvous.assert_not_called() + ds_api.begin_transaction('a', 'b', [], key='kei') + mock_begin_transaction.assert_called_once_with( + 'a', 'b', [], key='kei') + mock_catch_rendezvous.assert_called_once_with() + def test_commit(self): from google.cloud.gapic.datastore.v1 import datastore_client @@ -144,6 +207,50 @@ def test_commit(self): mock_commit.assert_called_once_with(1, 2, a=3) mock_catch_rendezvous.assert_called_once_with() + def test_rollback(self): + from google.cloud.gapic.datastore.v1 import datastore_client + + patch1 = mock.patch.object( + datastore_client.DatastoreClient, '__init__', + return_value=None) + patch2 = mock.patch.object( + datastore_client.DatastoreClient, 'rollback') + patch3 = mock.patch( + 'google.cloud.datastore._gax._grpc_catch_rendezvous') + + with patch1 as mock_constructor: + ds_api = self._make_one() + mock_constructor.assert_called_once_with() + with patch2 as mock_rollback: + with patch3 as mock_catch_rendezvous: + mock_catch_rendezvous.assert_not_called() + ds_api.rollback(11, 12, arp='marp') + mock_rollback.assert_called_once_with(11, 12, arp='marp') + 
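+ # The patched remapping context manager must be entered exactly once per wrapped call.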
mock_catch_rendezvous.assert_called_once_with() + + def test_allocate_ids(self): + from google.cloud.gapic.datastore.v1 import datastore_client + + patch1 = mock.patch.object( + datastore_client.DatastoreClient, '__init__', + return_value=None) + patch2 = mock.patch.object( + datastore_client.DatastoreClient, 'allocate_ids') + patch3 = mock.patch( + 'google.cloud.datastore._gax._grpc_catch_rendezvous') + + with patch1 as mock_constructor: + ds_api = self._make_one() + mock_constructor.assert_called_once_with() + with patch2 as mock_allocate_ids: + with patch3 as mock_catch_rendezvous: + mock_catch_rendezvous.assert_not_called() + ds_api.allocate_ids( + 'hey', 'bai', bye=(47, 4), shy={'a': 4}) + mock_allocate_ids.assert_called_once_with( + 'hey', 'bai', bye=(47, 4), shy={'a': 4}) + mock_catch_rendezvous.assert_called_once_with() + @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') class Test_make_datastore_api(unittest.TestCase): From 2a1c8646596690ca4b08ac466dd19ff2dddb12dc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 17 Mar 2017 16:17:35 -0700 Subject: [PATCH 104/611] Removing unused code-path for remapping raw gRPC exceptions in datastore. --- .../google/cloud/datastore/_gax.py | 24 +++------ .../unit_tests/test__gax.py | 50 ++++--------------- 2 files changed, 18 insertions(+), 56 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index d9580e92fb2b..e1d0e57a7737 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -60,8 +60,8 @@ @contextlib.contextmanager -def _grpc_catch_rendezvous(): - """Remap gRPC exceptions that happen in context. +def _catch_remap_gax_error(): + """Remap GAX exceptions that happen in context. .. _code.proto: https://github.com/googleapis/googleapis/blob/\ master/google/rpc/code.proto @@ -80,14 +80,6 @@ def _grpc_catch_rendezvous(): else: new_exc = error_class(exc.cause.details()) six.reraise(error_class, new_exc, sys.exc_info()[2]) - except exceptions.GrpcRendezvous as exc: - error_code = exc.code() - error_class = _GRPC_ERROR_MAPPING.get(error_code) - if error_class is None: - raise - else: - new_exc = error_class(exc.details()) - six.reraise(error_class, new_exc, sys.exc_info()[2]) class GAPICDatastoreAPI(datastore_client.DatastoreClient): @@ -119,7 +111,7 @@ def lookup(self, *args, **kwargs): :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ - with _grpc_catch_rendezvous(): + with _catch_remap_gax_error(): return super(GAPICDatastoreAPI, self).lookup(*args, **kwargs) def run_query(self, *args, **kwargs): @@ -138,7 +130,7 @@ def run_query(self, *args, **kwargs): :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ - with _grpc_catch_rendezvous(): + with _catch_remap_gax_error(): return super(GAPICDatastoreAPI, self).run_query(*args, **kwargs) def begin_transaction(self, *args, **kwargs): @@ -157,7 +149,7 @@ def begin_transaction(self, *args, **kwargs): :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ - with _grpc_catch_rendezvous(): + with _catch_remap_gax_error(): return super(GAPICDatastoreAPI, self).begin_transaction( *args, **kwargs) @@ -177,7 +169,7 @@ def commit(self, *args, **kwargs): :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. 
""" - with _grpc_catch_rendezvous(): + with _catch_remap_gax_error(): return super(GAPICDatastoreAPI, self).commit(*args, **kwargs) def rollback(self, *args, **kwargs): @@ -196,7 +188,7 @@ def rollback(self, *args, **kwargs): :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ - with _grpc_catch_rendezvous(): + with _catch_remap_gax_error(): return super(GAPICDatastoreAPI, self).rollback(*args, **kwargs) def allocate_ids(self, *args, **kwargs): @@ -215,7 +207,7 @@ def allocate_ids(self, *args, **kwargs): :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ - with _grpc_catch_rendezvous(): + with _catch_remap_gax_error(): return super(GAPICDatastoreAPI, self).allocate_ids( *args, **kwargs) diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/unit_tests/test__gax.py index 39e47fb8b068..2dd7f8d0e3d5 100644 --- a/packages/google-cloud-datastore/unit_tests/test__gax.py +++ b/packages/google-cloud-datastore/unit_tests/test__gax.py @@ -20,12 +20,12 @@ @unittest.skipUnless(_HAVE_GRPC, 'No gRPC') -class Test__grpc_catch_rendezvous(unittest.TestCase): +class Test__catch_remap_gax_error(unittest.TestCase): def _call_fut(self): - from google.cloud.datastore._gax import _grpc_catch_rendezvous + from google.cloud.datastore._gax import _catch_remap_gax_error - return _grpc_catch_rendezvous() + return _catch_remap_gax_error() @staticmethod def _fake_method(exc, result=None): @@ -48,37 +48,7 @@ def test_success(self): result = self._fake_method(None, expected) self.assertIs(result, expected) - def test_failure_aborted(self): - from grpc import StatusCode - from google.cloud.exceptions import Conflict - - details = 'Bad things.' 
- exc = self._make_rendezvous(StatusCode.ABORTED, details) - with self.assertRaises(Conflict): - with self._call_fut(): - self._fake_method(exc) - - def test_failure_invalid_argument(self): - from grpc import StatusCode - from google.cloud.exceptions import BadRequest - - details = ('Cannot have inequality filters on multiple ' - 'properties: [created, priority]') - exc = self._make_rendezvous(StatusCode.INVALID_ARGUMENT, details) - with self.assertRaises(BadRequest): - with self._call_fut(): - self._fake_method(exc) - - def test_failure_cancelled(self): - from google.cloud.exceptions import GrpcRendezvous - from grpc import StatusCode - - exc = self._make_rendezvous(StatusCode.CANCELLED, None) - with self.assertRaises(GrpcRendezvous): - with self._call_fut(): - self._fake_method(exc) - - def test_commit_failure_non_grpc_err(self): + def test_non_grpc_err(self): exc = RuntimeError('Not a gRPC error') with self.assertRaises(RuntimeError): with self._call_fut(): @@ -132,7 +102,7 @@ def test_lookup(self): return_value=None) patch2 = mock.patch.object(datastore_client.DatastoreClient, 'lookup') patch3 = mock.patch( - 'google.cloud.datastore._gax._grpc_catch_rendezvous') + 'google.cloud.datastore._gax._catch_remap_gax_error') with patch1 as mock_constructor: ds_api = self._make_one() @@ -153,7 +123,7 @@ def test_run_query(self): patch2 = mock.patch.object( datastore_client.DatastoreClient, 'run_query') patch3 = mock.patch( - 'google.cloud.datastore._gax._grpc_catch_rendezvous') + 'google.cloud.datastore._gax._catch_remap_gax_error') with patch1 as mock_constructor: ds_api = self._make_one() @@ -174,7 +144,7 @@ def test_begin_transaction(self): patch2 = mock.patch.object( datastore_client.DatastoreClient, 'begin_transaction') patch3 = mock.patch( - 'google.cloud.datastore._gax._grpc_catch_rendezvous') + 'google.cloud.datastore._gax._catch_remap_gax_error') with patch1 as mock_constructor: ds_api = self._make_one() @@ -195,7 +165,7 @@ def test_commit(self): return_value=None) patch2 = mock.patch.object(datastore_client.DatastoreClient, 'commit') patch3 = mock.patch( - 'google.cloud.datastore._gax._grpc_catch_rendezvous') + 'google.cloud.datastore._gax._catch_remap_gax_error') with patch1 as mock_constructor: ds_api = self._make_one() @@ -216,7 +186,7 @@ def test_rollback(self): patch2 = mock.patch.object( datastore_client.DatastoreClient, 'rollback') patch3 = mock.patch( - 'google.cloud.datastore._gax._grpc_catch_rendezvous') + 'google.cloud.datastore._gax._catch_remap_gax_error') with patch1 as mock_constructor: ds_api = self._make_one() @@ -237,7 +207,7 @@ def test_allocate_ids(self): patch2 = mock.patch.object( datastore_client.DatastoreClient, 'allocate_ids') patch3 = mock.patch( - 'google.cloud.datastore._gax._grpc_catch_rendezvous') + 'google.cloud.datastore._gax._catch_remap_gax_error') with patch1 as mock_constructor: ds_api = self._make_one() From a34348a56fa8f831c42263d6ccecd2838ce1b146 Mon Sep 17 00:00:00 2001 From: daspecster Date: Wed, 22 Mar 2017 11:10:38 -0400 Subject: [PATCH 105/611] Update datastore batch API doc reference. 
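The linked page documents Datastore batch operations, which in this library look roughly like the following sketch (assuming ``client`` is an existing ``datastore.Client`` and ``entity`` / ``key`` already exist):

    from google.cloud import datastore

    client = datastore.Client()
    with client.batch() as batch:
        # Mutations accumulate locally and are sent in a single
        # commit when the ``with`` block exits.
        batch.put(entity)
        batch.delete(key)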
--- packages/google-cloud-datastore/google/cloud/datastore/batch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index b20ba7047670..30a8aa4c67f1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -18,7 +18,7 @@ in a single request to the Cloud Datastore API. See -https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations +https://cloud.google.com/datastore/docs/concepts/entities#batch_operations """ from google.cloud.datastore import helpers From 8d1b414edae5bd1584915732f5bd1691351f21ca Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 Mar 2017 14:49:26 -0700 Subject: [PATCH 106/611] CI Rehash (#3146) --- packages/google-cloud-datastore/.flake8 | 6 + packages/google-cloud-datastore/LICENSE | 202 +++++++ packages/google-cloud-datastore/MANIFEST.in | 8 +- .../google/cloud/datastore/__init__.py | 3 + .../google/cloud/datastore/client.py | 4 +- .../google/cloud/datastore/entity.py | 4 +- .../google/cloud/datastore/query.py | 6 +- .../google/cloud/datastore/transaction.py | 4 +- packages/google-cloud-datastore/nox.py | 110 ++++ packages/google-cloud-datastore/setup.py | 2 +- .../google-cloud-datastore/tests/__init__.py | 0 .../google-cloud-datastore/tests/doctests.py | 95 ++++ .../tests/system/__init__.py | 0 .../tests/system/test_system.py | 503 ++++++++++++++++++ .../tests/system/utils/__init__.py | 0 .../tests/system/utils/clear_datastore.py | 104 ++++ .../tests/system/utils/populate_datastore.py | 107 ++++ .../{unit_tests => tests/unit}/__init__.py | 0 .../{unit_tests => tests/unit}/test__gax.py | 0 .../{unit_tests => tests/unit}/test__http.py | 0 .../{unit_tests => tests/unit}/test_batch.py | 0 .../{unit_tests => tests/unit}/test_client.py | 0 .../{unit_tests => tests/unit}/test_entity.py | 0 .../unit}/test_helpers.py | 0 .../{unit_tests => tests/unit}/test_key.py | 0 .../{unit_tests => tests/unit}/test_query.py | 0 .../unit}/test_transaction.py | 0 packages/google-cloud-datastore/tox.ini | 35 -- 28 files changed, 1144 insertions(+), 49 deletions(-) create mode 100644 packages/google-cloud-datastore/.flake8 create mode 100644 packages/google-cloud-datastore/LICENSE create mode 100644 packages/google-cloud-datastore/nox.py create mode 100644 packages/google-cloud-datastore/tests/__init__.py create mode 100644 packages/google-cloud-datastore/tests/doctests.py create mode 100644 packages/google-cloud-datastore/tests/system/__init__.py create mode 100644 packages/google-cloud-datastore/tests/system/test_system.py create mode 100644 packages/google-cloud-datastore/tests/system/utils/__init__.py create mode 100644 packages/google-cloud-datastore/tests/system/utils/clear_datastore.py create mode 100644 packages/google-cloud-datastore/tests/system/utils/populate_datastore.py rename packages/google-cloud-datastore/{unit_tests => tests/unit}/__init__.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test__gax.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test__http.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test_batch.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test_client.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test_entity.py (100%) rename 
packages/google-cloud-datastore/{unit_tests => tests/unit}/test_helpers.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test_key.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test_query.py (100%) rename packages/google-cloud-datastore/{unit_tests => tests/unit}/test_transaction.py (100%) delete mode 100644 packages/google-cloud-datastore/tox.ini diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 new file mode 100644 index 000000000000..25168dc87605 --- /dev/null +++ b/packages/google-cloud-datastore/.flake8 @@ -0,0 +1,6 @@ +[flake8] +exclude = + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-datastore/LICENSE b/packages/google-cloud-datastore/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-datastore/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index cb3a2b9ef4fa..9f7100c9528a 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -1,4 +1,4 @@ -include README.rst -graft google -graft unit_tests -global-exclude *.pyc +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include unit_tests * +global-exclude *.pyc __pycache__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index e09b2101e9ca..1cdc5db07ba3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -63,3 +63,6 @@ from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query from google.cloud.datastore.transaction import Transaction + +__all__ = ['__version__', 'Batch', 'Client', 'Entity', 'Key', 'Query', + 'Transaction'] diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 2579fd85b097..ed2336a54b8a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -526,7 +526,7 @@ def do_something(entity): .. testsetup:: query-page from google.cloud import datastore - from datastore import Config # system tests + from tests.system.test_system import Config # system tests client = datastore.Client() @@ -549,7 +549,7 @@ def do_something(entity): >>> first_page = next(pages) >>> first_page_entities = list(first_page) >>> query_iter.next_page_token - '...' + b'...' :type kwargs: dict :param kwargs: Parameters for initializing and instance of diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 51fb0c659a7a..a1708c0ca8f6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -43,7 +43,7 @@ class Entity(dict): .. 
testsetup:: entity-ctor from google.cloud import datastore - from datastore import Config # system tests + from tests.system.test_system import Config # system tests client = datastore.Client() key = client.key('EntityKind', 1234, namespace='_Doctest') @@ -56,7 +56,7 @@ class Entity(dict): .. doctest:: entity-ctor >>> client.get(key) - + You can then set values on the entity just like you would on any other dictionary. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index ebbe72eed52a..726e3acc4920 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -58,7 +58,7 @@ class Query(object): (Optional) The namespace to which to restrict results. If not passed, uses the client's value. - :type ancestor: :class:`google.cloud.datastore.key.Key` + :type ancestor: :class:`~google.cloud.datastore.key.Key` :param ancestor: (Optional) key of the ancestor to which this query's results are restricted. @@ -173,7 +173,7 @@ def kind(self, value): def ancestor(self): """The ancestor key for the query. - :rtype: Key or None + :rtype: :class:`~google.cloud.datastore.key.Key` or None :returns: The ancestor for the query. """ return self._ancestor @@ -182,7 +182,7 @@ def ancestor(self, value): """Set the ancestor for the query - :type value: Key + :type value: :class:`~google.cloud.datastore.key.Key` :param value: the new ancestor key """ if not isinstance(value, Key): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 00d4ac1d891b..6108bd80647a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -30,7 +30,7 @@ class Transaction(Batch): .. testsetup:: txn-put-multi, txn-api from google.cloud import datastore - from datastore import Config # system tests + from tests.system.test_system import Config # system tests client = datastore.Client() key1 = client.key('_Doctest') @@ -93,7 +93,7 @@ class SomeException(Exception): .. testsetup:: txn-entity-key, txn-entity-key-after, txn-manual from google.cloud import datastore - from datastore import Config # system tests + from tests.system.test_system import Config # system tests client = datastore.Client() diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py new file mode 100644 index 000000000000..f724c18e5eef --- /dev/null +++ b/packages/google-cloud-datastore/nox.py @@ -0,0 +1,110 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
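+# Nox sessions defined below: unit tests on each supported interpreter, system tests, doctests, lint, and an aggregate coverage report.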
+ +from __future__ import absolute_import + +import os + +import nox + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) +def unit_tests(session, python_version): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package in-place. + session.install('mock', 'pytest', 'pytest-cov', '../core/') + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run('py.test', '--quiet', + '--cov=google.cloud.datastore', '--cov=tests.unit', '--cov-append', + '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + 'tests/unit', + ) + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.6']) +def system_tests(session, python_version): + """Run the system test suite.""" + + # Sanity check: Only run system tests if the environment variable is set. + if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): + return + + # Run the system tests against latest Python 2 and Python 3 only. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install('mock', 'pytest', '../core/', '../test_utils/') + session.install('.') + + # Run py.test against the system tests. + session.run('py.test', '--quiet', 'tests/system') + + +@nox.session +def doctests(session): + """Run the doctest suite.""" + + # Sanity check: Only run doctests if the environment variable is set. + if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): + return + + # Doctests run against Python 3.6 only. + # It is difficult to make doctests run against both Python 2 and Python 3 + # because they test string output equivalence, which is difficult to + # make match (e.g. unicode literals starting with "u"). + session.interpreter = 'python3.6' + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install('mock', 'pytest', 'sphinx', + '../core/', '../test_utils/') + session.install('.') + + # Run py.test against the doctests. + session.run('py.test', '--quiet', 'tests/doctests.py') + + +@nox.session +def lint(session): + """Run flake8. + + Returns a failure if flake8 finds linting errors or sufficiently + serious code quality issues. + """ + session.interpreter = 'python3.6' + session.install('flake8') + session.install('.') + session.run('flake8', 'google/cloud/datastore') + + +@nox.session +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data.
+ """ + session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 625415c65ddb..b8a5ec01d109 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -64,7 +64,7 @@ 'google', 'google.cloud', ], - packages=find_packages(), + packages=find_packages(exclude=('unit_tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) diff --git a/packages/google-cloud-datastore/tests/__init__.py b/packages/google-cloud-datastore/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/tests/doctests.py b/packages/google-cloud-datastore/tests/doctests.py new file mode 100644 index 000000000000..5264635af03c --- /dev/null +++ b/packages/google-cloud-datastore/tests/doctests.py @@ -0,0 +1,95 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import pkgutil +import tempfile +import unittest + +import six + +import sphinx + +from google.cloud import datastore + + +SPHINX_CONF = """\ +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', +] +""" + +SPHINX_SECTION_TEMPLATE = """\ +Section %02d +=========== + +.. automodule:: google.cloud.%s + :members: + +""" + + +@unittest.skipIf(six.PY2, 'Doctests run against Python 3 only.') +class TestDoctest(unittest.TestCase): + + def _submodules(self): + pkg_iter = pkgutil.iter_modules(datastore.__path__) + result = [] + for _, mod_name, ispkg in pkg_iter: + self.assertFalse(ispkg) + result.append(mod_name) + + self.assertNotIn('__init__', result) + return result + + @staticmethod + def _add_section(index, mod_name, file_obj): + mod_part = 'datastore' + if mod_name != '__init__': + mod_part += '.' 
+ mod_name + content = SPHINX_SECTION_TEMPLATE % (index, mod_part) + file_obj.write(content) + + def _make_temp_docs(self): + docs_dir = tempfile.mkdtemp(prefix='datastore-') + + conf_file = os.path.join(docs_dir, 'conf.py') + + with open(conf_file, 'w') as file_obj: + file_obj.write(SPHINX_CONF) + + index_file = os.path.join(docs_dir, 'contents.rst') + datastore_modules = self._submodules() + with open(index_file, 'w') as file_obj: + self._add_section(0, '__init__', file_obj) + for index, datastore_module in enumerate(datastore_modules): + self._add_section(index + 1, datastore_module, file_obj) + + return docs_dir + + def test_it(self): + from sphinx import application + + docs_dir = self._make_temp_docs() + outdir = os.path.join(docs_dir, 'doctest', 'out') + doctreedir = os.path.join(docs_dir, 'doctest', 'doctrees') + + app = application.Sphinx( + srcdir=docs_dir, confdir=docs_dir, + outdir=outdir, doctreedir=doctreedir, + buildername='doctest', warningiserror=True, parallel=1) + + app.build() + self.assertEqual(app.statuscode, 0) diff --git a/packages/google-cloud-datastore/tests/system/__init__.py b/packages/google-cloud-datastore/tests/system/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py new file mode 100644 index 000000000000..64d4b86fd007 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -0,0 +1,503 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os +import unittest + +import httplib2 +import six + +from google.cloud._helpers import UTC +from google.cloud import datastore +from google.cloud.datastore.helpers import GeoPoint +from google.cloud.environment_vars import GCD_DATASET +from google.cloud.exceptions import Conflict + +from test_utils.system import EmulatorCreds +from test_utils.system import unique_resource_id + +from tests.system.utils import clear_datastore +from tests.system.utils import populate_datastore + + +class Config(object): + """Run-time configuration to be modified at set-up. + + This is a mutable stand-in to allow test set-up to modify + global state. + """ + CLIENT = None + TO_DELETE = [] + + +def clone_client(client): + return datastore.Client(project=client.project, + namespace=client.namespace, + credentials=client._credentials, + http=client._http) + + +def setUpModule(): + emulator_dataset = os.getenv(GCD_DATASET) + # Isolated namespace so concurrent test runs don't collide. + test_namespace = 'ns' + unique_resource_id() + if emulator_dataset is None: + Config.CLIENT = datastore.Client(namespace=test_namespace) + else: + credentials = EmulatorCreds() + http = httplib2.Http() # Un-authorized. 
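+ # ``EmulatorCreds`` is a no-op credential type; the emulator performs no auth checks.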
+ Config.CLIENT = datastore.Client(project=emulator_dataset, + namespace=test_namespace, + credentials=credentials, + http=http) + + +def tearDownModule(): + keys = [entity.key for entity in Config.TO_DELETE] + with Config.CLIENT.transaction(): + Config.CLIENT.delete_multi(keys) + + +class TestDatastore(unittest.TestCase): + + def setUp(self): + self.case_entities_to_delete = [] + + def tearDown(self): + with Config.CLIENT.transaction(): + keys = [entity.key for entity in self.case_entities_to_delete] + Config.CLIENT.delete_multi(keys) + + +class TestDatastoreAllocateIDs(TestDatastore): + + def test_allocate_ids(self): + num_ids = 10 + allocated_keys = Config.CLIENT.allocate_ids( + Config.CLIENT.key('Kind'), num_ids) + self.assertEqual(len(allocated_keys), num_ids) + + unique_ids = set() + for key in allocated_keys: + unique_ids.add(key.id) + self.assertIsNone(key.name) + self.assertNotEqual(key.id, None) + + self.assertEqual(len(unique_ids), num_ids) + + +class TestDatastoreSave(TestDatastore): + + @classmethod + def setUpClass(cls): + cls.PARENT = Config.CLIENT.key('Blog', 'PizzaMan') + + def _get_post(self, id_or_name=None, post_content=None): + post_content = post_content or { + 'title': u'How to make the perfect pizza in your grill', + 'tags': [u'pizza', u'grill'], + 'publishedAt': datetime.datetime(2001, 1, 1, tzinfo=UTC), + 'author': u'Silvano', + 'isDraft': False, + 'wordCount': 400, + 'rating': 5.0, + } + # Create an entity with the given content. + # NOTE: Using a parent to ensure consistency for query + # in `test_empty_kind`. + key = Config.CLIENT.key('Post', parent=self.PARENT) + entity = datastore.Entity(key=key) + entity.update(post_content) + + # Update the entity key. + if id_or_name is not None: + entity.key = entity.key.completed_key(id_or_name) + + return entity + + def _generic_test_post(self, name=None, key_id=None): + entity = self._get_post(id_or_name=(name or key_id)) + Config.CLIENT.put(entity) + + # Register entity to be deleted. + self.case_entities_to_delete.append(entity) + + if name is not None: + self.assertEqual(entity.key.name, name) + if key_id is not None: + self.assertEqual(entity.key.id, key_id) + retrieved_entity = Config.CLIENT.get(entity.key) + # Check the given and retrieved are the same. + self.assertEqual(retrieved_entity, entity) + + def test_post_with_name(self): + self._generic_test_post(name='post1') + + def test_post_with_id(self): + self._generic_test_post(key_id=123456789) + + def test_post_with_generated_id(self): + self._generic_test_post() + + def test_save_multiple(self): + with Config.CLIENT.transaction() as xact: + entity1 = self._get_post() + xact.put(entity1) + # Register entity to be deleted. + self.case_entities_to_delete.append(entity1) + + second_post_content = { + 'title': u'How to make the perfect homemade pasta', + 'tags': [u'pasta', u'homemade'], + 'publishedAt': datetime.datetime(2001, 1, 1), + 'author': u'Silvano', + 'isDraft': False, + 'wordCount': 450, + 'rating': 4.5, + } + entity2 = self._get_post(post_content=second_post_content) + xact.put(entity2) + # Register entity to be deleted.
+ self.case_entities_to_delete.append(entity2) + + keys = [entity1.key, entity2.key] + matches = Config.CLIENT.get_multi(keys) + self.assertEqual(len(matches), 2) + + def test_empty_kind(self): + query = Config.CLIENT.query(kind='Post') + query.ancestor = self.PARENT + posts = list(query.fetch(limit=2)) + self.assertEqual(posts, []) + + def test_all_value_types(self): + key = Config.CLIENT.key('TestPanObject', 1234) + entity = datastore.Entity(key=key) + entity['timestamp'] = datetime.datetime(2014, 9, 9, tzinfo=UTC) + key_stored = Config.CLIENT.key('SavedKey', 'right-here') + entity['key'] = key_stored + entity['truthy'] = True + entity['float'] = 2.718281828 + entity['int'] = 3735928559 + entity['words'] = u'foo' + entity['blob'] = b'seekretz' + entity_stored = datastore.Entity(key=key_stored) + entity_stored['hi'] = 'bye' + entity['nested'] = entity_stored + entity['items'] = [1, 2, 3] + entity['geo'] = GeoPoint(1.0, 2.0) + entity['nothing_here'] = None + + # Store the entity. + self.case_entities_to_delete.append(entity) + Config.CLIENT.put(entity) + + # Check the original and retrieved are the same. + retrieved_entity = Config.CLIENT.get(entity.key) + self.assertEqual(retrieved_entity, entity) + + +class TestDatastoreSaveKeys(TestDatastore): + + def test_save_key_self_reference(self): + parent_key = Config.CLIENT.key('Residence', 'NewYork') + key = Config.CLIENT.key('Person', 'name', parent=parent_key) + entity = datastore.Entity(key=key) + entity['fullName'] = u'Full name' + entity['linkedTo'] = key # Self reference. + + Config.CLIENT.put(entity) + self.case_entities_to_delete.append(entity) + + query = Config.CLIENT.query(kind='Person') + # Adding ancestor to ensure consistency. + query.ancestor = parent_key + query.add_filter('linkedTo', '=', key) + + stored_persons = list(query.fetch(limit=2)) + self.assertEqual(stored_persons, [entity]) + + +class TestDatastoreQuery(TestDatastore): + + @classmethod + def setUpClass(cls): + cls.CLIENT = clone_client(Config.CLIENT) + # Remove the namespace from the cloned client, since these + # query tests rely on the entities to be already stored and indexed, + # hence ``test_namespace`` set at runtime can't be used. + cls.CLIENT.namespace = None + + # In the emulator, re-populating the datastore is cheap. + if os.getenv(GCD_DATASET) is not None: + # Populate the datastore with the cloned client. + populate_datastore.add_characters(client=cls.CLIENT) + + cls.CHARACTERS = populate_datastore.CHARACTERS + # Use the client for this test instead of the global. + cls.ANCESTOR_KEY = cls.CLIENT.key(*populate_datastore.ANCESTOR) + + @classmethod + def tearDownClass(cls): + # In the emulator, destroy the query entities. + if os.getenv(GCD_DATASET) is not None: + # Use the client for this test instead of the global. + clear_datastore.remove_all_entities(client=cls.CLIENT) + + def _base_query(self): + # Use the client for this test instead of the global. + return self.CLIENT.query(kind='Character', + ancestor=self.ANCESTOR_KEY) + + def test_limit_queries(self): + limit = 5 + query = self._base_query() + + # Fetch characters. + iterator = query.fetch(limit=limit) + page = six.next(iterator.pages) + character_entities = list(page) + cursor = iterator.next_page_token + self.assertEqual(len(character_entities), limit) + + # Check cursor after fetch. + self.assertIsNotNone(cursor) + + # Fetch remaining characters.
+ new_character_entities = list(query.fetch(start_cursor=cursor)) + characters_remaining = len(self.CHARACTERS) - limit + self.assertEqual(len(new_character_entities), characters_remaining) + + def test_query_simple_filter(self): + query = self._base_query() + query.add_filter('appearances', '>=', 20) + expected_matches = 6 + # We expect 6, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + def test_query_multiple_filters(self): + query = self._base_query() + query.add_filter('appearances', '>=', 26) + query.add_filter('family', '=', 'Stark') + expected_matches = 4 + # We expect 4, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + def test_ancestor_query(self): + filtered_query = self._base_query() + + expected_matches = 8 + # We expect 8, but allow the query to get 1 extra. + entities = list(filtered_query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + def test_query_key_filter(self): + # Use the client for this test instead of the global. + rickard_key = self.CLIENT.key(*populate_datastore.RICKARD) + + query = self._base_query() + query.key_filter(rickard_key) + expected_matches = 1 + # We expect 1, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + def test_ordered_query(self): + query = self._base_query() + query.order = 'appearances' + expected_matches = 8 + # We expect 8, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + # Actually check the ordered data returned. + self.assertEqual(entities[0]['name'], self.CHARACTERS[0]['name']) + self.assertEqual(entities[7]['name'], self.CHARACTERS[3]['name']) + + def test_projection_query(self): + filtered_query = self._base_query() + filtered_query.projection = ['name', 'family'] + filtered_query.order = ['name', 'family'] + + # NOTE: There are 9 responses because of Catelyn. She has both + # Stark and Tully as her families, hence occurs twice in + # the results. + expected_matches = 9 + # We expect 9, but allow the query to get 1 extra. + entities = list(filtered_query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + arya_entity = entities[0] + catelyn_tully_entity = entities[3] + sansa_entity = entities[8] + + arya_dict = dict(arya_entity) + self.assertEqual(arya_dict, {'name': 'Arya', 'family': 'Stark'}) + + catelyn_stark_entity = entities[2] + catelyn_stark_dict = dict(catelyn_stark_entity) + self.assertEqual(catelyn_stark_dict, + {'name': 'Catelyn', 'family': 'Stark'}) + + catelyn_tully_dict = dict(catelyn_tully_entity) + self.assertEqual(catelyn_tully_dict, + {'name': 'Catelyn', 'family': 'Tully'}) + + # Check both Catelyn keys are the same. + self.assertEqual(catelyn_stark_entity.key, catelyn_tully_entity.key) + + sansa_dict = dict(sansa_entity) + self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'}) + + def test_query_paginate_with_offset(self): + page_query = self._base_query() + page_query.order = 'appearances' + offset = 2 + limit = 3 + iterator = page_query.fetch(limit=limit, offset=offset) + + # Fetch characters. 
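The pagination tests here all follow the same page-and-cursor handshake. A condensed sketch, assuming a configured ``client`` and the populated ``'Character'`` entities (this is not runnable without project credentials and data):

```python
# Sketch of the page/cursor handshake exercised by these tests.
# ``client`` and ``ancestor_key`` are assumed to exist.
import six


def fetch_page(client, ancestor_key, limit, offset=0, cursor=None):
    query = client.query(kind='Character', ancestor=ancestor_key)
    query.order = 'appearances'
    iterator = query.fetch(limit=limit, offset=offset, start_cursor=cursor)
    page = six.next(iterator.pages)   # materializes one page of results
    entities = list(page)
    # ``next_page_token`` is only populated once a page has been
    # consumed; it is the opaque cursor to resume from.
    return entities, iterator.next_page_token
```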
+ page = six.next(iterator.pages) + entities = list(page) + cursor = iterator.next_page_token + self.assertEqual(len(entities), limit) + self.assertEqual(entities[0]['name'], 'Robb') + self.assertEqual(entities[1]['name'], 'Bran') + self.assertEqual(entities[2]['name'], 'Catelyn') + + # Fetch next set of characters. + new_iterator = page_query.fetch(limit=limit, offset=0, + start_cursor=cursor) + entities = list(new_iterator) + self.assertEqual(len(entities), limit) + self.assertEqual(entities[0]['name'], 'Sansa') + self.assertEqual(entities[1]['name'], 'Jon Snow') + self.assertEqual(entities[2]['name'], 'Arya') + + def test_query_paginate_with_start_cursor(self): + page_query = self._base_query() + page_query.order = 'appearances' + limit = 3 + offset = 2 + iterator = page_query.fetch(limit=limit, offset=offset) + + # Fetch characters. + page = six.next(iterator.pages) + entities = list(page) + cursor = iterator.next_page_token + self.assertEqual(len(entities), limit) + + # Use cursor to create a fresh query. + fresh_query = self._base_query() + fresh_query.order = 'appearances' + + new_entities = list(fresh_query.fetch(start_cursor=cursor, + limit=limit)) + characters_remaining = len(self.CHARACTERS) - limit - offset + self.assertEqual(len(new_entities), characters_remaining) + self.assertEqual(new_entities[0]['name'], 'Sansa') + self.assertEqual(new_entities[2]['name'], 'Arya') + + def test_query_distinct_on(self): + query = self._base_query() + query.distinct_on = ['alive'] + + expected_matches = 2 + # We expect 2, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + self.assertEqual(len(entities), expected_matches) + + self.assertEqual(entities[0]['name'], 'Catelyn') + self.assertEqual(entities[1]['name'], 'Arya') + + +class TestDatastoreTransaction(TestDatastore): + + def test_transaction_via_with_statement(self): + entity = datastore.Entity(key=Config.CLIENT.key('Company', 'Google')) + entity['url'] = u'www.google.com' + + with Config.CLIENT.transaction() as xact: + result = Config.CLIENT.get(entity.key) + if result is None: + xact.put(entity) + self.case_entities_to_delete.append(entity) + + # This will always return after the transaction. + retrieved_entity = Config.CLIENT.get(entity.key) + self.case_entities_to_delete.append(retrieved_entity) + self.assertEqual(retrieved_entity, entity) + + def test_transaction_via_explicit_begin_get_commit(self): + # See + # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 + # Note that this example lacks the threading which provokes the race + # condition in that issue: we are basically just exercising the + # "explicit" path for using transactions.
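Condensed, the explicit path the test below walks looks roughly like this sketch; ``client`` stands for a configured ``datastore.Client`` and the two keys point at existing ``'account'`` entities:

```python
# Condensed sketch of the explicit begin/get/commit transaction path.
def transfer(client, key1, key2, amount):
    xact = client.transaction()
    xact.begin()
    src = client.get(key1, transaction=xact)
    dst = client.get(key2, transaction=xact)
    src['balance'] -= amount
    dst['balance'] += amount
    xact.put(src)
    xact.put(dst)
    xact.commit()  # both writes land atomically, or neither does
```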
+ BEFORE_1 = 100 + BEFORE_2 = 0 + TRANSFER_AMOUNT = 40 + key1 = Config.CLIENT.key('account', '123') + account1 = datastore.Entity(key=key1) + account1['balance'] = BEFORE_1 + key2 = Config.CLIENT.key('account', '234') + account2 = datastore.Entity(key=key2) + account2['balance'] = BEFORE_2 + Config.CLIENT.put_multi([account1, account2]) + self.case_entities_to_delete.append(account1) + self.case_entities_to_delete.append(account2) + + xact = Config.CLIENT.transaction() + xact.begin() + from_account = Config.CLIENT.get(key1, transaction=xact) + to_account = Config.CLIENT.get(key2, transaction=xact) + from_account['balance'] -= TRANSFER_AMOUNT + to_account['balance'] += TRANSFER_AMOUNT + + xact.put(from_account) + xact.put(to_account) + xact.commit() + + after1 = Config.CLIENT.get(key1) + after2 = Config.CLIENT.get(key2) + self.assertEqual(after1['balance'], BEFORE_1 - TRANSFER_AMOUNT) + self.assertEqual(after2['balance'], BEFORE_2 + TRANSFER_AMOUNT) + + def test_failure_with_contention(self): + contention_prop_name = 'baz' + local_client = clone_client(Config.CLIENT) + + # Insert an entity which will be retrieved in a transaction + # and updated outside it with a contentious value. + key = local_client.key('BreakTxn', 1234) + orig_entity = datastore.Entity(key=key) + orig_entity['foo'] = u'bar' + local_client.put(orig_entity) + self.case_entities_to_delete.append(orig_entity) + + with self.assertRaises(Conflict): + with local_client.transaction() as txn: + entity_in_txn = local_client.get(key) + + # Update the original entity outside the transaction. + orig_entity[contention_prop_name] = u'outside' + Config.CLIENT.put(orig_entity) + + # Try to update the entity which we already updated outside the + # transaction. + entity_in_txn[contention_prop_name] = u'inside' + txn.put(entity_in_txn) diff --git a/packages/google-cloud-datastore/tests/system/utils/__init__.py b/packages/google-cloud-datastore/tests/system/utils/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py new file mode 100644 index 000000000000..9dc24a49dc28 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -0,0 +1,104 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
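The ``Conflict`` raised in ``test_failure_with_contention`` above signals that a competing write landed between the transactional read and the commit; the usual application-level response is to retry the whole transaction. A sketch, assuming a configured ``client``:

```python
# Sketch of the common retry-on-contention pattern; inside the
# ``with`` block, client.put() buffers into the transaction and the
# write is sent at commit time when the block exits.
from google.cloud.exceptions import Conflict


def update_with_retry(client, key, prop, value, attempts=3):
    for _ in range(attempts):
        try:
            with client.transaction():
                entity = client.get(key)
                entity[prop] = value
                client.put(entity)
            return
        except Conflict:
            continue  # somebody wrote in between; re-read and retry
    raise Conflict('contention persisted after {} attempts'.format(attempts))
```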
+ +"""Script to remove all system test data from datastore.""" + +from __future__ import print_function + +import os + +import six + +from google.cloud import datastore + + +FETCH_MAX = 20 +ALL_KINDS = ( + 'Character', + 'Company', + 'Kind', + 'Person', + 'Post', +) +TRANSACTION_MAX_GROUPS = 5 + + +def print_func(message): + if os.getenv('GOOGLE_CLOUD_NO_PRINT') != 'true': + print(message) + + +def fetch_keys(kind, client, fetch_max=FETCH_MAX, query=None, cursor=None): + if query is None: + query = client.query(kind=kind) + query.keys_only() + + iterator = query.fetch(limit=fetch_max, start_cursor=cursor) + page = six.next(iterator.pages) + return query, list(page), iterator.next_page_token + + +def get_ancestors(entities): + # NOTE: A key will always have at least one path element. + key_roots = [entity.key.flat_path[:2] for entity in entities] + # Return the unique roots. + return list(set(key_roots)) + + +def remove_kind(kind, client): + results = [] + + query, curr_results, cursor = fetch_keys(kind, client) + results.extend(curr_results) + while curr_results: + query, curr_results, cursor = fetch_keys( + kind, client, query=query, cursor=cursor) + results.extend(curr_results) + + if not results: + return + + delete_outside_transaction = False + with client.transaction(): + # Now that we have all results, we seek to delete. + print_func('Deleting keys:') + print_func(results) + + ancestors = get_ancestors(results) + if len(ancestors) > TRANSACTION_MAX_GROUPS: + delete_outside_transaction = True + else: + client.delete_multi([result.key for result in results]) + + if delete_outside_transaction: + client.delete_multi([result.key for result in results]) + + +def remove_all_entities(client=None): + if client is None: + # Get a client that uses the test dataset. + client = datastore.Client() + for kind in ALL_KINDS: + remove_kind(kind, client) + + +if __name__ == '__main__': + print_func('This command will remove all entities for ' + 'the following kinds:') + print_func('\n'.join('- ' + val for val in ALL_KINDS)) + response = six.moves.input('Is this OK [y/n]? ') + if response.lower() == 'y': + remove_all_entities() + else: + print_func('Doing nothing.') diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py new file mode 100644 index 000000000000..bb43ae315473 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -0,0 +1,107 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
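The sweep in ``clear_datastore.py`` above boils down to a keys-only query followed by bulk deletes; the real script additionally pages with ``FETCH_MAX`` and falls back to deleting outside the transaction when more than ``TRANSACTION_MAX_GROUPS`` entity groups are involved. A minimal sketch, assuming a configured ``client``:

```python
# Minimal sketch of the keys-only delete sweep. A keys-only query
# transfers no property data over the wire, only keys.
def delete_kind_simple(client, kind):
    query = client.query(kind=kind)
    query.keys_only()
    keys = [entity.key for entity in query.fetch()]
    client.delete_multi(keys)
```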
+ +"""Script to populate datastore with system test data.""" + + +from __future__ import print_function + +import os + +import six + +from google.cloud import datastore + + +ANCESTOR = ('Book', 'GoT') +RICKARD = ANCESTOR + ('Character', 'Rickard') +EDDARD = RICKARD + ('Character', 'Eddard') +KEY_PATHS = ( + RICKARD, + EDDARD, + ANCESTOR + ('Character', 'Catelyn'), + EDDARD + ('Character', 'Arya'), + EDDARD + ('Character', 'Sansa'), + EDDARD + ('Character', 'Robb'), + EDDARD + ('Character', 'Bran'), + EDDARD + ('Character', 'Jon Snow'), +) +CHARACTERS = ( + { + 'name': u'Rickard', + 'family': u'Stark', + 'appearances': 0, + 'alive': False, + }, { + 'name': u'Eddard', + 'family': u'Stark', + 'appearances': 9, + 'alive': False, + }, { + 'name': u'Catelyn', + 'family': [u'Stark', u'Tully'], + 'appearances': 26, + 'alive': False, + }, { + 'name': u'Arya', + 'family': u'Stark', + 'appearances': 33, + 'alive': True, + }, { + 'name': u'Sansa', + 'family': u'Stark', + 'appearances': 31, + 'alive': True, + }, { + 'name': u'Robb', + 'family': u'Stark', + 'appearances': 22, + 'alive': False, + }, { + 'name': u'Bran', + 'family': u'Stark', + 'appearances': 25, + 'alive': True, + }, { + 'name': u'Jon Snow', + 'family': u'Stark', + 'appearances': 32, + 'alive': True, + }, +) + + +def print_func(message): + if os.getenv('GOOGLE_CLOUD_NO_PRINT') != 'true': + print(message) + + +def add_characters(client=None): + if client is None: + # Get a client that uses the test dataset. + client = datastore.Client() + with client.transaction() as xact: + for key_path, character in six.moves.zip(KEY_PATHS, CHARACTERS): + if key_path[-1] != character['name']: + raise ValueError(('Character and key don\'t agree', + key_path, character)) + entity = datastore.Entity(key=client.key(*key_path)) + entity.update(character) + xact.put(entity) + print_func('Adding Character %s %s' % (character['name'], + character['family'])) + + +if __name__ == '__main__': + add_characters() diff --git a/packages/google-cloud-datastore/unit_tests/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/__init__.py rename to packages/google-cloud-datastore/tests/unit/__init__.py diff --git a/packages/google-cloud-datastore/unit_tests/test__gax.py b/packages/google-cloud-datastore/tests/unit/test__gax.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test__gax.py rename to packages/google-cloud-datastore/tests/unit/test__gax.py diff --git a/packages/google-cloud-datastore/unit_tests/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test__http.py rename to packages/google-cloud-datastore/tests/unit/test__http.py diff --git a/packages/google-cloud-datastore/unit_tests/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_batch.py rename to packages/google-cloud-datastore/tests/unit/test_batch.py diff --git a/packages/google-cloud-datastore/unit_tests/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_client.py rename to packages/google-cloud-datastore/tests/unit/test_client.py diff --git a/packages/google-cloud-datastore/unit_tests/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py similarity index 100% 
rename from packages/google-cloud-datastore/unit_tests/test_entity.py rename to packages/google-cloud-datastore/tests/unit/test_entity.py diff --git a/packages/google-cloud-datastore/unit_tests/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_helpers.py rename to packages/google-cloud-datastore/tests/unit/test_helpers.py diff --git a/packages/google-cloud-datastore/unit_tests/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_key.py rename to packages/google-cloud-datastore/tests/unit/test_key.py diff --git a/packages/google-cloud-datastore/unit_tests/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_query.py rename to packages/google-cloud-datastore/tests/unit/test_query.py diff --git a/packages/google-cloud-datastore/unit_tests/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py similarity index 100% rename from packages/google-cloud-datastore/unit_tests/test_transaction.py rename to packages/google-cloud-datastore/tests/unit/test_transaction.py diff --git a/packages/google-cloud-datastore/tox.ini b/packages/google-cloud-datastore/tox.ini deleted file mode 100644 index 76ffb1f2a8b5..000000000000 --- a/packages/google-cloud-datastore/tox.ini +++ /dev/null @@ -1,35 +0,0 @@ -[tox] -envlist = - py27,py34,py35,cover - -[testing] -localdeps = - pip install --quiet --upgrade {toxinidir}/../core -deps = - {toxinidir}/../core - mock - pytest -covercmd = - py.test --quiet \ - --cov=google.cloud.datastore \ - --cov=unit_tests \ - --cov-config {toxinidir}/.coveragerc \ - unit_tests - -[testenv] -commands = - {[testing]localdeps} - py.test --quiet {posargs} unit_tests -deps = - {[testing]deps} - -[testenv:cover] -basepython = - python2.7 -commands = - {[testing]localdeps} - {[testing]covercmd} -deps = - {[testenv]deps} - coverage - pytest-cov From bb6d62e4d2d9c60d051ea09cd89c7efb87f8264e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Mar 2017 10:20:16 -0700 Subject: [PATCH 107/611] Fixing up some format strings in nox configs. Using `STRING_TEMPLATE % VARIABLE` can introduce hard-to-find bugs if `VARIABLE` is expected to be a string but ends up being a tuple. Instead of using percent formatting, just using `.format`. Also making tweaks to `get_target_packages` to make some path manipulation / checks OS-independent. --- packages/google-cloud-datastore/nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index f724c18e5eef..bc380656bfc3 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -25,7 +25,7 @@ def unit_tests(session, python_version): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. - session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', '../core/') @@ -49,7 +49,7 @@ def system_tests(session, python_version): return # Run the system tests against latest Python 2 and Python 3 only. 
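The pitfall named in the commit message above deserves a concrete illustration; this snippet is runnable standalone and shows why a tuple slips silently through ``%`` formatting but stays visible with ``str.format``:

```python
# Runnable illustration of the %-formatting tuple pitfall.
value = '2.7'
print('python%s' % value)             # 'python2.7'

value = ('2.7',)                      # accidentally a tuple
print('python%s' % value)             # still 'python2.7': %-formatting
                                      # unpacks the tuple silently
try:
    print('python%s' % ('2.7', '3.6'))
except TypeError as exc:
    print(exc)                        # 'not all arguments converted ...'

# str.format() never unpacks, so a stray tuple is at least visible:
print('python{}'.format(('2.7',)))    # "python('2.7',)"
```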
- session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package into the # virtualenv's dist-packages. From 3b75ae2fb1f82ea57f703ef0d70f83112b3330a8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 30 Mar 2017 08:43:22 -0700 Subject: [PATCH 108/611] Renaming http argument(s) as _http. (#3235) --- .../google/cloud/datastore/client.py | 44 ++++++++++--------- .../tests/system/test_system.py | 4 +- .../tests/unit/test_client.py | 42 +++++++++--------- 3 files changed, 47 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index ed2336a54b8a..af7d6d4f9113 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -45,7 +45,7 @@ _DATASTORE_BASE_URL = 'https://datastore.googleapis.com' """Datastore API request URL base.""" -_USE_GAX = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) +_USE_GRPC = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) def _get_gcd_project(): @@ -173,38 +173,42 @@ class Client(ClientWithProject): :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``http`` object is + client. If not passed (and if no ``_http`` object is passed), falls back to the default inferred from the environment. - :type http: :class:`~httplib2.Http` - :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an - ``http`` object is created that is bound to the - ``credentials`` for the current object. - - :type use_gax: bool - :param use_gax: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment - variable. + :type _http: :class:`~httplib2.Http` + :param _http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``_http`` object is created that is bound to the + ``credentials`` for the current object. + This parameter should be considered private, and could + change in the future. + + :type _use_grpc: bool + :param _use_grpc: (Optional) Explicitly specifies whether + to use the gRPC transport (via GAX) or HTTP. If unset, + falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` + environment variable. + This parameter should be considered private, and could + change in the future.
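For the rename this commit performs, usage changes like so; the snippet mirrors the mock-credential pattern of the unit tests below and never touches the network:

```python
# Sketch of the renamed, underscore-prefixed constructor arguments
# (formerly ``http=`` and ``use_gax=``). Mock credentials keep this
# entirely offline.
import mock
import google.auth.credentials
from google.cloud import datastore

creds = mock.Mock(spec=google.auth.credentials.Credentials)
client = datastore.Client(
    project='my-project', credentials=creds,
    _http=object(), _use_grpc=False)
assert client._use_grpc is False
```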
""" SCOPE = ('https://www.googleapis.com/auth/datastore',) """The scopes required for authenticating as a Cloud Datastore consumer.""" def __init__(self, project=None, namespace=None, - credentials=None, http=None, use_gax=None): + credentials=None, _http=None, _use_grpc=None): super(Client, self).__init__( - project=project, credentials=credentials, http=http) + project=project, credentials=credentials, _http=_http) self.namespace = namespace self._batch_stack = _LocalStack() self._datastore_api_internal = None - if use_gax is None: - self._use_gax = _USE_GAX + if _use_grpc is None: + self._use_grpc = _USE_GRPC else: - self._use_gax = use_gax + self._use_grpc = _use_grpc try: host = os.environ[GCD_HOST] self._base_url = 'http://' + host @@ -220,7 +224,7 @@ def _determine_default(project): def _datastore_api(self): """Getter for a wrapped API object.""" if self._datastore_api_internal is None: - if self._use_gax: + if self._use_grpc: self._datastore_api_internal = make_datastore_api(self) else: self._datastore_api_internal = HTTPDatastoreAPI(self) diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 64d4b86fd007..129018748e08 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -46,7 +46,7 @@ def clone_client(client): return datastore.Client(project=client.project, namespace=client.namespace, credentials=client._credentials, - http=client._http) + _http=client._http) def setUpModule(): @@ -61,7 +61,7 @@ def setUpModule(): Config.CLIENT = datastore.Client(project=emulator_dataset, namespace=test_namespace, credentials=credentials, - http=http) + _http=http) def tearDownModule(): diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 8f4197c51419..9824e06b73ad 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -127,12 +127,12 @@ def _get_target_class(): return Client def _make_one(self, project=PROJECT, namespace=None, - credentials=None, http=None, use_gax=None): + credentials=None, _http=None, _use_grpc=None): return self._get_target_class()(project=project, namespace=namespace, credentials=credentials, - http=http, - use_gax=use_gax) + _http=_http, + _use_grpc=_use_grpc) def test_constructor_w_project_no_environ(self): # Some environments (e.g. 
AppVeyor CI) run in GCE, so @@ -186,7 +186,7 @@ def test_constructor_w_explicit_inputs(self): client = self._make_one(project=other, namespace=namespace, credentials=creds, - http=http) + _http=http) self.assertEqual(client.project, other) self.assertEqual(client.namespace, namespace) self.assertIs(client._credentials, creds) @@ -195,32 +195,32 @@ def test_constructor_w_explicit_inputs(self): self.assertEqual(list(client._batch_stack), []) self.assertEqual(client._base_url, _DATASTORE_BASE_URL) - def test_constructor_use_gax_default(self): + def test_constructor_use_grpc_default(self): import google.cloud.datastore.client as MUT project = 'PROJECT' creds = _make_credentials() http = object() - with mock.patch.object(MUT, '_USE_GAX', new=True): + with mock.patch.object(MUT, '_USE_GRPC', new=True): client1 = self._make_one( - project=project, credentials=creds, http=http) - self.assertTrue(client1._use_gax) + project=project, credentials=creds, _http=http) + self.assertTrue(client1._use_grpc) # Explicitly over-ride the environment. client2 = self._make_one( - project=project, credentials=creds, http=http, - use_gax=False) - self.assertFalse(client2._use_gax) + project=project, credentials=creds, _http=http, + _use_grpc=False) + self.assertFalse(client2._use_grpc) - with mock.patch.object(MUT, '_USE_GAX', new=False): + with mock.patch.object(MUT, '_USE_GRPC', new=False): client3 = self._make_one( - project=project, credentials=creds, http=http) - self.assertFalse(client3._use_gax) + project=project, credentials=creds, _http=http) + self.assertFalse(client3._use_grpc) # Explicitly over-ride the environment. client4 = self._make_one( - project=project, credentials=creds, http=http, - use_gax=True) - self.assertTrue(client4._use_gax) + project=project, credentials=creds, _http=http, + _use_grpc=True) + self.assertTrue(client4._use_grpc) def test_constructor_gcd_host(self): from google.cloud.environment_vars import GCD_HOST @@ -233,13 +233,13 @@ def test_constructor_gcd_host(self): with mock.patch('os.environ', new=fake_environ): client = self._make_one( - project=project, credentials=creds, http=http) + project=project, credentials=creds, _http=http) self.assertEqual(client._base_url, 'http://' + host) def test__datastore_api_property_gax(self): client = self._make_one( project='prahj-ekt', credentials=_make_credentials(), - http=object(), use_gax=True) + _http=object(), _use_grpc=True) self.assertIsNone(client._datastore_api_internal) patch = mock.patch( @@ -262,7 +262,7 @@ def test__datastore_api_property_http(self): client = self._make_one( project='prahj-ekt', credentials=_make_credentials(), - http=object(), use_gax=False) + _http=object(), _use_grpc=False) self.assertIsNone(client._datastore_api_internal) ds_api = client._datastore_api @@ -816,7 +816,7 @@ def test_allocate_ids_w_partial_key(self): incomplete_key._id = None creds = _make_credentials() - client = self._make_one(credentials=creds, use_gax=False) + client = self._make_one(credentials=creds, _use_grpc=False) allocated = mock.Mock( keys=[_KeyPB(i) for i in range(num_ids)], spec=['keys']) alloc_ids = mock.Mock(return_value=allocated, spec=[]) From 16a9f8473ed0e9b8b80f9db29f9cd2ff37d004d4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 Mar 2017 14:45:10 -0700 Subject: [PATCH 109/611] GA and Beta Promotions (#3245) * Make clients explicitly unpickleable. Closes #3211. * Make clients explicitly unpickleable. Closes #3211. * Add GA designator, add 1.0 version numbers. * Version changes. Eep. 
* Oops, Speech is still alpha. * 0.24.0, not 0.24.1 * Remove double __getstate__ goof. * Version changes. Eep. * Oops, Speech is still alpha. * Remove double __getstate__ goof. * Adding 3.6 classifier where missing and fixing bad versions. Done via "git grep '0\.24'" and "git grep '0\.23'". * Fix Noxfiles for local packages. * Fixing copy-pasta issue in error reporting nox config. Also fixing bad indent in same file. * Depend on stable logging in error reporting package. * Fixing lint errors in error_reporting. These were masked because error_reporting's lint nox session was linting the datastore codebase. This also means that the error reporting package has gained __all__. * Fixing a syntax error in nox config for logging. Also fixing an indent error while I was in there. * Revert "Add docs for 'result_index' usage and a system test." This reverts commit b5742aa160f604ec7cd81873ad24ac9aa75e548d. * Fixing docs nox session for umbrella package. Two issues: - error_reporting came BEFORE logging (which means it would try to pull in a logging dep from PyPI that doesn't exist) - dns was NOT in the list of local packages * Updating upper bound on logging in error_reporting. * Un-revert typo fix. --- packages/google-cloud-datastore/nox.py | 14 +++++++++----- packages/google-cloud-datastore/setup.py | 7 ++++--- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index bc380656bfc3..78a187aa51a0 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -19,6 +19,9 @@ import nox +LOCAL_DEPS = ('../core/',) + + @nox.session @nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) def unit_tests(session, python_version): @@ -28,7 +31,7 @@ def unit_tests(session, python_version): session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', '../core/') + session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. @@ -53,7 +56,8 @@ def system_tests(session, python_version): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install('mock', 'pytest', '../core/', '../test_utils/') + session.install('mock', 'pytest', *LOCAL_DEPS) + session.install('../test_utils/') session.install('.') # Run py.test against the system tests. @@ -76,8 +80,8 @@ def doctests(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install('mock', 'pytest', 'sphinx', - '../core/', '../test_utils/') + session.install('mock', 'pytest', 'sphinx', *LOCAL_DEPS) + session.install('../test_utils/') session.install('.') # Run py.test against the system tests. @@ -92,7 +96,7 @@ def lint(session): serious code quality issues.
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/datastore') diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index b8a5ec01d109..67bb0bf9575e 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -44,20 +44,21 @@ 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Topic :: Internet', ], } REQUIREMENTS = [ - 'google-cloud-core >= 0.23.1, < 0.24dev', + 'google-cloud-core >= 0.24.0, < 0.25dev', 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-datastore', - version='0.23.0', + version='1.0.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From d26822becf99e79eeaae633a309d674e6f4391d5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Apr 2017 07:49:24 -0700 Subject: [PATCH 110/611] Adding check (in datastore) that setup.py README is valid RST. (#3316) --- packages/google-cloud-datastore/nox.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 78a187aa51a0..7894b1ae0b23 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -101,6 +101,15 @@ def lint(session): session.run('flake8', 'google/cloud/datastore') +@nox.session +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.interpreter = 'python3.6' + session.install('docutils', 'Pygments') + session.run( + 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + + @nox.session def cover(session): """Run the final coverage report. From 5bc15bb77e2a61671868a483729af03a538ce29c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Apr 2017 10:03:56 -0700 Subject: [PATCH 111/611] Ignore tests (rather than unit_tests) in setup.py files. (#3319) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 67bb0bf9575e..cc82802315ae 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -65,7 +65,7 @@ 'google', 'google.cloud', ], - packages=find_packages(exclude=('unit_tests*',)), + packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) From d4230e68bf2311b976174e19031c45e5f38f2ba6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 9 May 2017 12:14:41 -0700 Subject: [PATCH 112/611] Add better documentation for support types in datastore Entity. (#3363) H/T to @GustavoRPS for bringing this up in #3361. Also snuck in a change in `google.cloud.datastore.helpers` to use `six.binary_type` in place of `(str, bytes)`. (It wasn't a Py3 error before because that check came **after** a `six.text_type` check.) 
--- .../google/cloud/datastore/entity.py | 31 +++++++++++++++++-- .../google/cloud/datastore/helpers.py | 2 +- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index a1708c0ca8f6..dc8a60b038be 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -24,9 +24,9 @@ class Entity(dict): An entity storing the actual instance of data. Each entity is officially represented with a - :class:`google.cloud.datastore.key.Key` class, however it is possible that - you might create an Entity with only a partial Key (that is, a Key - with a Kind, and possibly a parent, but without an ID). In such a + :class:`~google.cloud.datastore.key.Key`, however it is possible that + you might create an entity with only a partial key (that is, a key + with a kind, and possibly a parent, but without an ID). In such a case, the datastore service will automatically assign an ID to the partial key. @@ -66,6 +66,31 @@ class Entity(dict): >>> entity['age'] = 20 >>> entity['name'] = 'JJ' + However, not all types are allowed as a value for a Google Cloud Datastore + entity. The following basic types are supported by the API: + + * :class:`datetime.datetime` + * :class:`~google.cloud.datastore.key.Key` + * :class:`bool` + * :class:`float` + * :class:`int` (as well as :class:`long` in Python 2) + * ``unicode`` (called ``str`` in Python 3) + * ``bytes`` (called ``str`` in Python 2) + * :class:`~google.cloud.datastore.helpers.GeoPoint` + * :data:`None` + + In addition, two container types are supported: + + * :class:`list` + * :class:`~google.cloud.datastore.entity.Entity` + + Each entry in a list must be one of the value types (basic or + container) and each value in an + :class:`~google.cloud.datastore.entity.Entity` must as well. In + this case an :class:`~google.cloud.datastore.entity.Entity` **as a + container** acts as a :class:`dict`, but also has the special annotations + of ``key`` and ``exclude_from_indexes``. + And you can treat an entity like a regular Python dictionary: .. 
testsetup:: entity-dict diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 36b11d0f8900..ee4537317030 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -311,7 +311,7 @@ def _pb_attr_value(val): name, value = 'integer', val elif isinstance(val, six.text_type): name, value = 'string', val - elif isinstance(val, (bytes, str)): + elif isinstance(val, six.binary_type): name, value = 'blob', val elif isinstance(val, Entity): name, value = 'entity', val From 373ebd36d49114c1e7130a6be4b38ef438ee436f Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 14:36:29 -0700 Subject: [PATCH 113/611] Vision semi-GAPIC (#3373) --- .../google/cloud/datastore/query.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 726e3acc4920..2ab65064f85e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -63,14 +63,15 @@ class Query(object): (Optional) key of the ancestor to which this query's results are restricted. - :type filters: sequence of (property_name, operator, value) tuples - :param filters: property filters applied by this query. + :type filters: tuple[str, str, str] + :param filters: Property filters applied by this query. The sequence + is ``(property_name, operator, value)``. :type projection: sequence of string :param projection: fields returned as part of query results. :type order: sequence of string - :param order: field names used to order query results. Prepend '-' + :param order: field names used to order query results. Prepend ``-`` to a field name to sort it in descending order. :type distinct_on: sequence of string @@ -198,8 +199,9 @@ def ancestor(self): def filters(self): """Filters set on the query. - :rtype: sequence of (property_name, operator, value) tuples. - :returns: The filters set on the query. + :rtype: tuple[str, str, str] + :returns: The filters set on the query. The sequence is + ``(property_name, operator, value)``. """ return self._filters[:] From d020a479efb0d3956d50ff20a15ccfdef54026bf Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 8 Jun 2017 16:15:41 -0700 Subject: [PATCH 114/611] Adding datastore Key.(to|from)_legacy_urlsafe. (#3491) * Adding bare-minimum proto for converting legacy App Engine "Reference" pbs. * Rough draft of working implementation of datastore Key.(to|from)_legacy_urlsafe. Needs more tests but wanted to get the PR in front of reviewers ASAP. * Adding implementation for datastore Key.to_legacy_urlsafe(). Also resolved some lint issues (line too long) and restructured unit test to be able to re-use "stored" values. * Adding _onestore_v3_pb2 to ignored files for flake8. * Addressing @jonparrott feedback. In particular: - Just splitting on ~ when cleaning app strings - Rewording to_legacy_urlsafe() docstring to invoke `ndb.Key(urlsafe=...)` and to restate the "returns" text - Removing the _urlsafe_b64(decode|encode) micro-optimizations that were brought over from the ndb codebase * Adding test coverage for helpers needed for Key.(to|from)_legacy_urlsafe. * Adding LICENSE header to hand-written legacy GAE proto. * Renaming _onestore_v3.proto --> _app_engine_key.proto. 
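The new round-trip can be exercised entirely locally, since no RPC is involved; this sketch reuses the exact example key from the unit tests added in this commit:

```python
# Local round-trip through the new legacy-urlsafe API.
from google.cloud.datastore.key import Key

key = Key('Parent', 59, 'Child', 'Feather',
          project='s~sample-app', namespace='space')
urlsafe = key.to_legacy_urlsafe()
# ``urlsafe`` is bytes, usable as ndb.Key(urlsafe=...) in App Engine.

round_trip = Key.from_legacy_urlsafe(urlsafe)
# from_legacy_urlsafe() strips the 's~' partition prefix via _clean_app.
assert round_trip.project == 'sample-app'
assert round_trip.namespace == 'space'
assert round_trip.flat_path == ('Parent', 59, 'Child', 'Feather')
```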
--- packages/google-cloud-datastore/.coveragerc | 2 + packages/google-cloud-datastore/.flake8 | 5 + .../cloud/datastore/_app_engine_key.proto | 30 ++ .../cloud/datastore/_app_engine_key_pb2.py | 184 ++++++++++++ .../google/cloud/datastore/key.py | 205 ++++++++++++- .../tests/unit/test_key.py | 277 ++++++++++++++++++ 6 files changed, 702 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index a54b99aa14b7..1596e4637d3f 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -2,6 +2,8 @@ branch = True [report] +omit = + _app_engine_key_pb2.py fail_under = 100 show_missing = True exclude_lines = diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 25168dc87605..2feb7fefea2a 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,5 +1,10 @@ [flake8] exclude = + # Datastore includes generated code in the manual layer; + # do not lint this. + google/cloud/datastore/_app_engine_key_pb2.py, + + # Standard linting exemptions. __pycache__, .git, *.pyc, diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto new file mode 100644 index 000000000000..7248f1a4e4ef --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto @@ -0,0 +1,30 @@ +// Copyright 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +message Reference { + required string app = 13; + optional string name_space = 20; + required Path path = 14; + optional string database_id = 23; +} + +message Path { + repeated group Element = 1 { + required string type = 2; + optional int64 id = 3; + optional string name = 4; + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py new file mode 100644 index 000000000000..bbb1c75b80df --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py @@ -0,0 +1,184 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
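Before the generated module below, a short sketch of what the hand-written proto above models; it builds the same 'Parent'/'Child' Reference the tests use and serializes it (this serialized payload is what later gets base64-encoded):

```python
# Constructing a legacy Reference with the checked-in generated module;
# runs locally, no service involved.
from google.cloud.datastore import _app_engine_key_pb2

reference = _app_engine_key_pb2.Reference(
    app='s~sample-app',
    name_space='space',
    path=_app_engine_key_pb2.Path(element=[
        _app_engine_key_pb2.Path.Element(type='Parent', id=59),
        _app_engine_key_pb2.Path.Element(type='Child', name='Feather'),
    ]),
)
raw = reference.SerializeToString()
# base64.urlsafe_b64encode(raw) is what to_legacy_urlsafe() returns
# (see key.py later in this commit).
```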
+# source: _app_engine_key.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='_app_engine_key.proto', + package='', + syntax='proto2', + serialized_pb=_b('\n\x15_app_engine_key.proto\"V\n\tReference\x12\x0b\n\x03\x61pp\x18\r \x02(\t\x12\x12\n\nname_space\x18\x14 \x01(\t\x12\x13\n\x04path\x18\x0e \x02(\x0b\x32\x05.Path\x12\x13\n\x0b\x64\x61tabase_id\x18\x17 \x01(\t\"Y\n\x04Path\x12\x1e\n\x07\x65lement\x18\x01 \x03(\n2\r.Path.Element\x1a\x31\n\x07\x45lement\x12\x0c\n\x04type\x18\x02 \x02(\t\x12\n\n\x02id\x18\x03 \x01(\x03\x12\x0c\n\x04name\x18\x04 \x01(\t') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_REFERENCE = _descriptor.Descriptor( + name='Reference', + full_name='Reference', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='app', full_name='Reference.app', index=0, + number=13, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name_space', full_name='Reference.name_space', index=1, + number=20, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='path', full_name='Reference.path', index=2, + number=14, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='database_id', full_name='Reference.database_id', index=3, + number=23, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=25, + serialized_end=111, +) + + +_PATH_ELEMENT = _descriptor.Descriptor( + name='Element', + full_name='Path.Element', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='Path.Element.type', index=0, + number=2, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='id', full_name='Path.Element.id', index=1, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='Path.Element.name', index=2, + number=4, type=9, cpp_type=9, 
label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=153, + serialized_end=202, +) + +_PATH = _descriptor.Descriptor( + name='Path', + full_name='Path', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='element', full_name='Path.element', index=0, + number=1, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PATH_ELEMENT, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=202, +) + +_REFERENCE.fields_by_name['path'].message_type = _PATH +_PATH_ELEMENT.containing_type = _PATH +_PATH.fields_by_name['element'].message_type = _PATH_ELEMENT +DESCRIPTOR.message_types_by_name['Reference'] = _REFERENCE +DESCRIPTOR.message_types_by_name['Path'] = _PATH + +Reference = _reflection.GeneratedProtocolMessageType('Reference', (_message.Message,), dict( + DESCRIPTOR = _REFERENCE, + __module__ = '_app_engine_key_pb2' + # @@protoc_insertion_point(class_scope:Reference) + )) +_sym_db.RegisterMessage(Reference) + +Path = _reflection.GeneratedProtocolMessageType('Path', (_message.Message,), dict( + + Element = _reflection.GeneratedProtocolMessageType('Element', (_message.Message,), dict( + DESCRIPTOR = _PATH_ELEMENT, + __module__ = '_app_engine_key_pb2' + # @@protoc_insertion_point(class_scope:Path.Element) + )) + , + DESCRIPTOR = _PATH, + __module__ = '_app_engine_key_pb2' + # @@protoc_insertion_point(class_scope:Path) + )) +_sym_db.RegisterMessage(Path) +_sym_db.RegisterMessage(Path.Element) + + +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 5ae08c5642ca..166a5afde46b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -14,11 +14,28 @@ """Create / interact with Google Cloud Datastore keys.""" +import base64 import copy import six from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 +from google.cloud._helpers import _to_bytes +from google.cloud.datastore import _app_engine_key_pb2 + + +_DATABASE_ID_TEMPLATE = ( + 'Received non-empty database ID: {!r}.\n' + 'urlsafe strings are not expected to encode a Reference that ' + 'contains a database ID.') +_BAD_ELEMENT_TEMPLATE = ( + 'At most one of ID and name can be set on an element. Received ' + 'id = {!r} and name = {!r}.') +_EMPTY_ELEMENT = ( + 'Exactly one of ID and name must be set on an element. ' + 'Encountered an element with neither set that was not the last ' + 'element of a path.') + class Key(object): """An immutable representation of a datastore Key. @@ -79,7 +96,7 @@ class Key(object): * namespace (string): A namespace identifier for the key. * project (string): The project associated with the key. - * parent (:class:`google.cloud.datastore.key.Key`): The parent of the key. 
+ * parent (:class:`~google.cloud.datastore.key.Key`): The parent of the key. The project argument is required unless it has been set implicitly. """ @@ -281,6 +298,53 @@ def to_protobuf(self): return key + def to_legacy_urlsafe(self): + """Convert to a base64 encode urlsafe string for App Engine. + + This is intended to work with the "legacy" representation of a + datastore "Key" used within Google App Engine (a so-called + "Reference"). The returned string can be used as the ``urlsafe`` + argument to ``ndb.Key(urlsafe=...)``. + + :rtype: bytes + :returns: A bytestring containing the key encoded as URL-safe base64. + """ + reference = _app_engine_key_pb2.Reference( + app=self.project, + path=_to_legacy_path(self._path), # Avoid the copy. + name_space=self.namespace, + ) + raw_bytes = reference.SerializeToString() + return base64.urlsafe_b64encode(raw_bytes) + + @classmethod + def from_legacy_urlsafe(cls, urlsafe): + """Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`. + + This is intended to work with the "legacy" representation of a + datastore "Key" used within Google App Engine (a so-called + "Reference"). This assumes that ``urlsafe`` was created within an App + Engine app via something like ``ndb.Key(...).urlsafe()``. + + :type urlsafe: bytes or unicode + :param urlsafe: The base64 encoded (ASCII) string corresponding to a + datastore "Key" / "Reference". + + :rtype: :class:`~google.cloud.datastore.key.Key`. + :returns: The key corresponding to ``urlsafe``. + """ + urlsafe = _to_bytes(urlsafe, encoding='ascii') + raw_bytes = base64.urlsafe_b64decode(urlsafe) + + reference = _app_engine_key_pb2.Reference() + reference.ParseFromString(raw_bytes) + + project = _clean_app(reference.app) + namespace = _get_empty(reference.name_space, u'') + _check_database_id(reference.database_id) + flat_path = _get_flat_path(reference.path) + return cls(*flat_path, project=project, namespace=namespace) + @property def is_partial(self): """Boolean indicating if the key has an ID (or name). @@ -427,3 +491,142 @@ def _validate_project(project, parent): raise ValueError("A Key must have a project set.") return project + + +def _clean_app(app_str): + """Clean a legacy (i.e. from App Engine) app string. + + :type app_str: str + :param app_str: The ``app`` value stored in a "Reference" pb. + + :rtype: str + :returns: The cleaned value. + """ + parts = app_str.split('~', 1) + return parts[-1] + + +def _get_empty(value, empty_value): + """Check if a protobuf field is "empty". + + :type value: object + :param value: A basic field from a protobuf. + + :type empty_value: object + :param empty_value: The "empty" value for the same type as + ``value``. + """ + if value == empty_value: + return None + else: + return value + + +def _check_database_id(database_id): + """Make sure a "Reference" database ID is empty. + + :type database_id: unicode + :param database_id: The ``database_id`` field from a "Reference" protobuf. + + :raises: :exc:`ValueError` if the ``database_id`` is not empty. + """ + if database_id != u'': + msg = _DATABASE_ID_TEMPLATE.format(database_id) + raise ValueError(msg) + + +def _add_id_or_name(flat_path, element_pb, empty_allowed): + """Add the ID or name from an element to a list. + + :type flat_path: list + :param flat_path: List of accumulated path parts. + + :type element_pb: :class:`._app_engine_key_pb2.Path.Element` + :param element_pb: The element containing ID or name. + + :type empty_allowed: bool + :param empty_allowed: Indicates if neither ID or name need be set. 
If :data:`False`, then **exactly** one of them must be.
+
+    :raises: :exc:`ValueError` if 0 or 2 of ID/name are set (unless
+             ``empty_allowed=True`` and 0 are set).
+    """
+    id_ = element_pb.id
+    name = element_pb.name
+    # NOTE: Below, 0 and the empty string are the "null" values for their
+    #       respective types, indicating that the value is unset.
+    if id_ == 0:
+        if name == u'':
+            if not empty_allowed:
+                raise ValueError(_EMPTY_ELEMENT)
+        else:
+            flat_path.append(name)
+    else:
+        if name == u'':
+            flat_path.append(id_)
+        else:
+            msg = _BAD_ELEMENT_TEMPLATE.format(id_, name)
+            raise ValueError(msg)
+
+
+def _get_flat_path(path_pb):
+    """Convert a legacy "Path" protobuf to a flat path.
+
+    For example
+
+        Element {
+            type: "parent"
+            id: 59
+        }
+        Element {
+            type: "child"
+            name: "naem"
+        }
+
+    would convert to ``('parent', 59, 'child', 'naem')``.
+
+    :type path_pb: :class:`._app_engine_key_pb2.Path`
+    :param path_pb: Legacy protobuf "Path" object (from a "Reference").
+
+    :rtype: tuple
+    :returns: The path parts from ``path_pb``.
+    """
+    num_elts = len(path_pb.element)
+    last_index = num_elts - 1
+
+    result = []
+    for index, element in enumerate(path_pb.element):
+        result.append(element.type)
+        _add_id_or_name(result, element, index == last_index)
+
+    return tuple(result)
+
+
+def _to_legacy_path(dict_path):
+    """Convert a tuple of ints and strings into a legacy "Path".
+
+    .. note::
+
+        This assumes, but does not verify, that each entry in
+        ``dict_path`` is valid (i.e. doesn't have more than one
+        key out of "name" / "id").
+
+    :type dict_path: list
+    :param dict_path: The "structured" path for a key, i.e. it
+                      is a list of dictionaries, each of which has
+                      "kind" and one of "name" / "id" as keys.
+
+    :rtype: :class:`._app_engine_key_pb2.Path`
+    :returns: The legacy path corresponding to ``dict_path``.
+    """
+    elements = []
+    for part in dict_path:
+        element_kwargs = {'type': part['kind']}
+        if 'id' in part:
+            element_kwargs['id'] = part['id']
+        elif 'name' in part:
+            element_kwargs['name'] = part['name']
+        element = _app_engine_key_pb2.Path.Element(**element_kwargs)
+        elements.append(element)
+
+    return _app_engine_key_pb2.Path(element=elements)
diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py
index 904338368c02..5b89e146254d 100644
--- a/packages/google-cloud-datastore/tests/unit/test_key.py
+++ b/packages/google-cloud-datastore/tests/unit/test_key.py
@@ -18,6 +18,20 @@ class TestKey(unittest.TestCase):
 
     _DEFAULT_PROJECT = 'PROJECT'
+    # NOTE: This comes directly from a running (in the dev appserver)
+    #       App Engine app. Created via:
+    #
+    #           from google.appengine.ext import ndb
+    #           key = ndb.Key(
+    #               'Parent', 59, 'Child', 'Feather',
+    #               namespace='space', app='s~sample-app')
+    #           urlsafe = key.urlsafe()
+    _URLSAFE_EXAMPLE = (
+        b'agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ'
+        b'WF0aGVyDKIBBXNwYWNl')
+    _URLSAFE_APP = 's~sample-app'
+    _URLSAFE_NAMESPACE = 'space'
+    _URLSAFE_FLAT_PATH = ('Parent', 59, 'Child', 'Feather')
 
     @staticmethod
     def _get_target_class():
@@ -372,6 +386,27 @@ def test_to_protobuf_w_no_kind(self):
         # Unset values are False-y.
         self.assertEqual(pb.path[0].kind, '')
 
+    def test_to_legacy_urlsafe(self):
+        key = self._make_one(
+            *self._URLSAFE_FLAT_PATH,
+            project=self._URLSAFE_APP,
+            namespace=self._URLSAFE_NAMESPACE)
+        # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
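
Taken together, ``to_legacy_urlsafe``, ``from_legacy_urlsafe``, and the
helpers above give a round trip between this library's keys and App
Engine's "Reference" encoding. A minimal sketch, reusing the constants
from the tests here (note that the 's~' partition prefix is stripped on
the way back in, so the trip is not symmetric on the project):

    from google.cloud.datastore.key import Key

    key = Key('Parent', 59, 'Child', 'Feather',
              project='s~sample-app', namespace='space')
    urlsafe = key.to_legacy_urlsafe()            # URL-safe base64 bytes
    round_trip = Key.from_legacy_urlsafe(urlsafe)
    assert round_trip.project == 'sample-app'    # 's~' prefix removed
    assert round_trip.namespace == 'space'
    assert round_trip.flat_path == ('Parent', 59, 'Child', 'Feather')
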
+ urlsafe = key.to_legacy_urlsafe() + self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE) + + def test_from_legacy_urlsafe(self): + klass = self._get_target_class() + key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE) + + self.assertEqual('s~' + key.project, self._URLSAFE_APP) + self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE) + self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH) + # Also make sure we didn't accidentally set the parent. + self.assertIsNone(key._parent) + self.assertIsNotNone(key.parent) + self.assertIs(key._parent, key.parent) + def test_is_partial_no_name_or_id(self): key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertTrue(key.is_partial) @@ -431,3 +466,245 @@ def test_parent_multiple_calls(self): self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent self.assertIs(parent, new_parent) + + +class Test__clean_app(unittest.TestCase): + + PROJECT = 'my-prahjekt' + + @staticmethod + def _call_fut(app_str): + from google.cloud.datastore.key import _clean_app + + return _clean_app(app_str) + + def test_already_clean(self): + app_str = self.PROJECT + self.assertEqual(self._call_fut(app_str), self.PROJECT) + + def test_standard(self): + app_str = 's~' + self.PROJECT + self.assertEqual(self._call_fut(app_str), self.PROJECT) + + def test_european(self): + app_str = 'e~' + self.PROJECT + self.assertEqual(self._call_fut(app_str), self.PROJECT) + + def test_dev_server(self): + app_str = 'dev~' + self.PROJECT + self.assertEqual(self._call_fut(app_str), self.PROJECT) + + +class Test__get_empty(unittest.TestCase): + + @staticmethod + def _call_fut(value, empty_value): + from google.cloud.datastore.key import _get_empty + + return _get_empty(value, empty_value) + + def test_unset(self): + for empty_value in (u'', 0, 0.0, []): + ret_val = self._call_fut(empty_value, empty_value) + self.assertIsNone(ret_val) + + def test_actually_set(self): + value_pairs = ( + (u'hello', u''), + (10, 0), + (3.14, 0.0), + (['stuff', 'here'], []), + ) + for value, empty_value in value_pairs: + ret_val = self._call_fut(value, empty_value) + self.assertIs(ret_val, value) + + +class Test__check_database_id(unittest.TestCase): + + @staticmethod + def _call_fut(database_id): + from google.cloud.datastore.key import _check_database_id + + return _check_database_id(database_id) + + def test_empty_value(self): + ret_val = self._call_fut(u'') + # Really we are just happy there was no exception. 
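
The prefixes exercised by ``Test__clean_app`` above are the three
partition markers App Engine prepends to an application ID ('s~' for
standard apps, 'e~' for European apps, 'dev~' for the dev appserver).
The ``split('~', 1)[-1]`` trick handles all of them and passes
already-clean strings through unchanged:

    for app_str in ('my-prahjekt', 's~my-prahjekt',
                    'e~my-prahjekt', 'dev~my-prahjekt'):
        assert app_str.split('~', 1)[-1] == 'my-prahjekt'
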
+ self.assertIsNone(ret_val) + + def test_failure(self): + with self.assertRaises(ValueError): + self._call_fut(u'some-database-id') + + +class Test__add_id_or_name(unittest.TestCase): + + @staticmethod + def _call_fut(flat_path, element_pb, empty_allowed): + from google.cloud.datastore.key import _add_id_or_name + + return _add_id_or_name(flat_path, element_pb, empty_allowed) + + def test_add_id(self): + flat_path = [] + id_ = 123 + element_pb = _make_element_pb(id=id_) + + ret_val = self._call_fut(flat_path, element_pb, False) + self.assertIsNone(ret_val) + self.assertEqual(flat_path, [id_]) + ret_val = self._call_fut(flat_path, element_pb, True) + self.assertIsNone(ret_val) + self.assertEqual(flat_path, [id_, id_]) + + def test_add_name(self): + flat_path = [] + name = 'moon-shadow' + element_pb = _make_element_pb(name=name) + + ret_val = self._call_fut(flat_path, element_pb, False) + self.assertIsNone(ret_val) + self.assertEqual(flat_path, [name]) + ret_val = self._call_fut(flat_path, element_pb, True) + self.assertIsNone(ret_val) + self.assertEqual(flat_path, [name, name]) + + def test_both_present(self): + element_pb = _make_element_pb(id=17, name='seventeen') + flat_path = [] + with self.assertRaises(ValueError): + self._call_fut(flat_path, element_pb, False) + with self.assertRaises(ValueError): + self._call_fut(flat_path, element_pb, True) + + self.assertEqual(flat_path, []) + + def test_both_empty_failure(self): + element_pb = _make_element_pb() + flat_path = [] + with self.assertRaises(ValueError): + self._call_fut(flat_path, element_pb, False) + + self.assertEqual(flat_path, []) + + def test_both_empty_allowed(self): + element_pb = _make_element_pb() + flat_path = [] + ret_val = self._call_fut(flat_path, element_pb, True) + self.assertIsNone(ret_val) + self.assertEqual(flat_path, []) + + +class Test__get_flat_path(unittest.TestCase): + + @staticmethod + def _call_fut(path_pb): + from google.cloud.datastore.key import _get_flat_path + + return _get_flat_path(path_pb) + + def test_one_pair(self): + kind = 'Widget' + name = 'Scooter' + element_pb = _make_element_pb(type=kind, name=name) + path_pb = _make_path_pb(element_pb) + flat_path = self._call_fut(path_pb) + self.assertEqual(flat_path, (kind, name)) + + def test_two_pairs(self): + kind1 = 'parent' + id1 = 59 + element_pb1 = _make_element_pb(type=kind1, id=id1) + + kind2 = 'child' + name2 = 'naem' + element_pb2 = _make_element_pb(type=kind2, name=name2) + + path_pb = _make_path_pb(element_pb1, element_pb2) + flat_path = self._call_fut(path_pb) + self.assertEqual(flat_path, (kind1, id1, kind2, name2)) + + def test_partial_key(self): + kind1 = 'grandparent' + name1 = 'cats' + element_pb1 = _make_element_pb(type=kind1, name=name1) + + kind2 = 'parent' + id2 = 1337 + element_pb2 = _make_element_pb(type=kind2, id=id2) + + kind3 = 'child' + element_pb3 = _make_element_pb(type=kind3) + + path_pb = _make_path_pb(element_pb1, element_pb2, element_pb3) + flat_path = self._call_fut(path_pb) + self.assertEqual(flat_path, (kind1, name1, kind2, id2, kind3)) + + +class Test__to_legacy_path(unittest.TestCase): + + @staticmethod + def _call_fut(dict_path): + from google.cloud.datastore.key import _to_legacy_path + + return _to_legacy_path(dict_path) + + def test_one_pair(self): + kind = 'Widget' + name = 'Scooter' + dict_path = [{'kind': kind, 'name': name}] + path_pb = self._call_fut(dict_path) + + element_pb = _make_element_pb(type=kind, name=name) + expected_pb = _make_path_pb(element_pb) + self.assertEqual(path_pb, expected_pb) + + def 
test_two_pairs(self): + kind1 = 'parent' + id1 = 59 + + kind2 = 'child' + name2 = 'naem' + + dict_path = [{'kind': kind1, 'id': id1}, {'kind': kind2, 'name': name2}] + path_pb = self._call_fut(dict_path) + + element_pb1 = _make_element_pb(type=kind1, id=id1) + element_pb2 = _make_element_pb(type=kind2, name=name2) + expected_pb = _make_path_pb(element_pb1, element_pb2) + self.assertEqual(path_pb, expected_pb) + + def test_partial_key(self): + kind1 = 'grandparent' + name1 = 'cats' + + kind2 = 'parent' + id2 = 1337 + + kind3 = 'child' + + dict_path = [ + {'kind': kind1, 'name': name1}, + {'kind': kind2, 'id': id2}, + {'kind': kind3}, + ] + path_pb = self._call_fut(dict_path) + + element_pb1 = _make_element_pb(type=kind1, name=name1) + element_pb2 = _make_element_pb(type=kind2, id=id2) + element_pb3 = _make_element_pb(type=kind3) + expected_pb = _make_path_pb(element_pb1, element_pb2, element_pb3) + self.assertEqual(path_pb, expected_pb) + + +def _make_element_pb(**kwargs): + from google.cloud.datastore import _app_engine_key_pb2 + + return _app_engine_key_pb2.Path.Element(**kwargs) + + +def _make_path_pb(*element_pbs): + from google.cloud.datastore import _app_engine_key_pb2 + + return _app_engine_key_pb2.Path(element=element_pbs) From 9e99e7f5ae26956547a74d300c5082c470a17608 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Jun 2017 15:08:10 -0700 Subject: [PATCH 115/611] Re-enable pylint in info-only mode for all packages (#3519) --- packages/google-cloud-datastore/nox.py | 13 +++++++--- .../google-cloud-datastore/pylint.config.py | 25 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 packages/google-cloud-datastore/pylint.config.py diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 7894b1ae0b23..5171bf0bb012 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -90,15 +90,22 @@ def doctests(session): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/datastore') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session diff --git a/packages/google-cloud-datastore/pylint.config.py b/packages/google-cloud-datastore/pylint.config.py new file mode 100644 index 000000000000..d8ca7b92e85e --- /dev/null +++ b/packages/google-cloud-datastore/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
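
One detail of the new lint session above is easy to miss:
``success_codes=range(0, 100)`` is what makes the pylint run "info-only".
pylint exits with a bit-mask of the message categories it emitted, so
accepting codes 0-99 reports findings without failing the build. A
hypothetical stand-alone sketch of the same pattern:

    import nox

    @nox.session
    def lint(session):
        session.install('pylint')
        # Advisory only: any pylint exit code in 0-99 counts as success.
        session.run('pylint', 'google/cloud/datastore',
                    success_codes=range(0, 100))
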
+ +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) From e8198d21d155cf1d9540e8826705c8187068bcfe Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Jun 2017 18:31:36 -0400 Subject: [PATCH 116/611] Prep datastore-1.1.0 release. (#3527) --- packages/google-cloud-datastore/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index cc82802315ae..6a09fb8595f3 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-datastore', - version='1.0.0', + version='1.1.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From 9c1840c2f11c262728e6fd11a7744b51c1cbaf9d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Jun 2017 10:32:30 -0700 Subject: [PATCH 117/611] Fix inclusion of tests in manifest.in (#3552) --- packages/google-cloud-datastore/MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ From 23f6d8e906da34909cae3272b134bd297c1bf380 Mon Sep 17 00:00:00 2001 From: Michael Englo Date: Wed, 28 Jun 2017 10:49:14 -0700 Subject: [PATCH 118/611] Strip base64 padding characters from urlsafe in Datastore's (to|from)_legacy_urlsafe (#3560) Also * add padding characters in `from_legacy_urlsafe` if needed * add an extra example in the unit tests that actually requires base64 padding --- .../google/cloud/datastore/key.py | 7 ++- .../tests/unit/test_key.py | 47 ++++++++++++++----- 2 files changed, 40 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 166a5afde46b..f1733f8f5d8e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -304,7 +304,8 @@ def to_legacy_urlsafe(self): This is intended to work with the "legacy" representation of a datastore "Key" used within Google App Engine (a so-called "Reference"). The returned string can be used as the ``urlsafe`` - argument to ``ndb.Key(urlsafe=...)``. + argument to ``ndb.Key(urlsafe=...)``. The base64 encoded values + will have padding removed. :rtype: bytes :returns: A bytestring containing the key encoded as URL-safe base64. 
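
The padding arithmetic this patch relies on is worth spelling out:
base64 output length is always a multiple of four, so stripped padding
can be restored by appending ``(-len(s) % 4)`` equals signs. A small
self-contained check:

    import base64

    encoded = base64.urlsafe_b64encode(b'hi')    # b'aGk='
    stripped = encoded.strip(b'=')               # b'aGk', as the patch does
    padded = stripped + b'=' * (-len(stripped) % 4)
    assert base64.urlsafe_b64decode(padded) == b'hi'
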
@@ -315,7 +316,7 @@ def to_legacy_urlsafe(self): name_space=self.namespace, ) raw_bytes = reference.SerializeToString() - return base64.urlsafe_b64encode(raw_bytes) + return base64.urlsafe_b64encode(raw_bytes).strip(b'=') @classmethod def from_legacy_urlsafe(cls, urlsafe): @@ -334,6 +335,8 @@ def from_legacy_urlsafe(cls, urlsafe): :returns: The key corresponding to ``urlsafe``. """ urlsafe = _to_bytes(urlsafe, encoding='ascii') + padding = b'=' * (-len(urlsafe) % 4) + urlsafe += padding raw_bytes = base64.urlsafe_b64decode(urlsafe) reference = _app_engine_key_pb2.Reference() diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index 5b89e146254d..4fb7b89911b2 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -26,12 +26,15 @@ class TestKey(unittest.TestCase): # 'Parent', 59, 'Child', 'Feather', # namespace='space', app='s~sample-app') # urlsafe = key.urlsafe() - _URLSAFE_EXAMPLE = ( + _URLSAFE_EXAMPLE1 = ( b'agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ' b'WF0aGVyDKIBBXNwYWNl') - _URLSAFE_APP = 's~sample-app' - _URLSAFE_NAMESPACE = 'space' - _URLSAFE_FLAT_PATH = ('Parent', 59, 'Child', 'Feather') + _URLSAFE_APP1 = 's~sample-app' + _URLSAFE_NAMESPACE1 = 'space' + _URLSAFE_FLAT_PATH1 = ('Parent', 59, 'Child', 'Feather') + _URLSAFE_EXAMPLE2 = b'agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA' + _URLSAFE_APP2 = 's~fire' + _URLSAFE_FLAT_PATH2 = ('Kind', 'Thing') @staticmethod def _get_target_class(): @@ -388,25 +391,45 @@ def test_to_protobuf_w_no_kind(self): def test_to_legacy_urlsafe(self): key = self._make_one( - *self._URLSAFE_FLAT_PATH, - project=self._URLSAFE_APP, - namespace=self._URLSAFE_NAMESPACE) + *self._URLSAFE_FLAT_PATH1, + project=self._URLSAFE_APP1, + namespace=self._URLSAFE_NAMESPACE1) # NOTE: ``key.project`` is somewhat "invalid" but that is OK. urlsafe = key.to_legacy_urlsafe() - self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE) + self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE1) + + def test_to_legacy_urlsafe_strip_padding(self): + key = self._make_one( + *self._URLSAFE_FLAT_PATH2, + project=self._URLSAFE_APP2) + # NOTE: ``key.project`` is somewhat "invalid" but that is OK. + urlsafe = key.to_legacy_urlsafe() + self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE2) + # Make sure it started with base64 padding. + self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) def test_from_legacy_urlsafe(self): klass = self._get_target_class() - key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE) + key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE1) - self.assertEqual('s~' + key.project, self._URLSAFE_APP) - self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE) - self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH) + self.assertEqual('s~' + key.project, self._URLSAFE_APP1) + self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE1) + self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH1) # Also make sure we didn't accidentally set the parent. self.assertIsNone(key._parent) self.assertIsNotNone(key.parent) self.assertIs(key._parent, key.parent) + def test_from_legacy_urlsafe_needs_padding(self): + klass = self._get_target_class() + # Make sure it will have base64 padding added. 
+ self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) + key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE2) + + self.assertEqual('s~' + key.project, self._URLSAFE_APP2) + self.assertIsNone(key.namespace) + self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH2) + def test_is_partial_no_name_or_id(self): key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertTrue(key.is_partial) From b92f4dc02f7c085391531deacd3c0d58c04f605b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 28 Jun 2017 14:07:25 -0700 Subject: [PATCH 119/611] Making all LICENSE headers "uniform". (#3563) --- packages/google-cloud-datastore/pylint.config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/pylint.config.py b/packages/google-cloud-datastore/pylint.config.py index d8ca7b92e85e..b618319b8b61 100644 --- a/packages/google-cloud-datastore/pylint.config.py +++ b/packages/google-cloud-datastore/pylint.config.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, From 3beb2f92cfdac3b4225c2f71c38e802d602a7ba2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 29 Jun 2017 10:56:09 -0700 Subject: [PATCH 120/611] Skipping system tests when credentials env. var is unset. (#3475) --- packages/google-cloud-datastore/nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 5171bf0bb012..983152e7db92 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -49,7 +49,7 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) @@ -70,7 +70,7 @@ def doctests(session): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Doctests run against Python 3.6 only. # It is difficult to make doctests run against both Python 2 and Python 3 From 07f85ab9917b5c9ab131f5c10157e2942c902e9b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Jul 2017 16:41:31 -0400 Subject: [PATCH 121/611] Shorten nox virtualenv names to avoid hashing. (#3585) --- packages/google-cloud-datastore/nox.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 983152e7db92..2cf2186aa45a 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. 
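
The ``session.skip`` change above (#3475) replaces a silent ``return``,
so a missing-credentials run now shows up as "skipped" in nox's summary
instead of as a vacuous pass. The guard, as a stand-alone sketch
(assuming it runs inside a nox session function):

    import os

    def system_tests(session):
        # Skip visibly when no service-account credentials are configured.
        if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
            session.skip(
                'Credentials must be set via environment variable.')
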
session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -54,6 +57,9 @@ def system_tests(session, python_version): # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -112,6 +118,10 @@ def lint(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') From 3a6b6fe26ca5c82c036b8999799239f64056e10a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Jul 2017 10:51:40 -0700 Subject: [PATCH 122/611] Updating author_email in all setup.py. (#3598) Done via: $ git grep -l author_email | \ > xargs sed -i s/jjg+google-cloud-python@google.com/googleapis-publisher@google.com/g and manually editing `videointelligence/setup.py` and `vision/setup.py`. --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 6a09fb8595f3..692dd109a481 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', From 6ce87e955eb342ce2e490d691486fcc7d4fd1ea8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 19 Jul 2017 14:58:17 -0700 Subject: [PATCH 123/611] Fixing references to "dead" docs links. (#3631) * Fixing references to "dead" docs links. Done via: $ git grep -l 'google-cloud-auth.html' | \ > xargs sed -i s/'google-cloud-auth.html'/'core\/auth.html'/g $ git grep -l 'http\:\/\/google-cloud-python.readthedocs.io' | \ > xargs sed -i s/'http\:\/\/google-cloud-python.readthedocs.io'/\ > 'https\:\/\/google-cloud-python.readthedocs.io'/g Fixes #3531. * Fixing up other docs that were moved in #3459. --- packages/google-cloud-datastore/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index d913abc7821f..dbfc252564ea 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Datastore - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. 
_Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -44,7 +44,7 @@ queries, and eventual consistency for all other queries. See the ``google-cloud-python`` API `datastore documentation`_ to learn how to interact with the Cloud Datastore using this Client Library. -.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html +.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html See the `official Google Cloud Datastore documentation`_ for more details on how to activate Cloud Datastore for your project. From 2cd0c46152301a34a308b4e164677f7270d3e803 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 09:33:21 -0700 Subject: [PATCH 124/611] Changing all pypi.python.org links to warehouse links. (#3641) Done via $ export OLD='https\:\/\/pypi.python.org\/pypi\/' $ export NEW='https\:\/\/pypi.org\/project\/' $ git grep -l ${OLD} | xargs sed -i s/${OLD}/${NEW}/g Then manually going through and adding a trailing slash to all warehouse links. (Though I did undo changes to `docs/json/`.) --- packages/google-cloud-datastore/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index dbfc252564ea..89ba561baed3 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -67,6 +67,6 @@ how to activate Cloud Datastore for your project. print(result) .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg - :target: https://pypi.python.org/pypi/google-cloud-datastore + :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg - :target: https://pypi.python.org/pypi/google-cloud-datastore + :target: https://pypi.org/project/google-cloud-datastore/ From 943d85b8511b0334c6c14d6196a49efad8267a13 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Jul 2017 14:13:44 -0700 Subject: [PATCH 125/611] Removing `get_credentials()` from `core`. (#3667) * Removing `get_credentials()` from `core`. In the process also: - Slight re-org on `nox.py` config (to pass posargs) for `core` and `datastore` - Getting rid of last usage of `_Monkey` in datastore This is part of `@jonparrott`'s effort to slim down / stabilize `core`. * Removing `google.cloud.credentials` module from docs. --- packages/google-cloud-datastore/nox.py | 15 ++++++++---- .../tests/unit/test_client.py | 18 ++++++--------- .../tests/unit/test_query.py | 23 +++++++------------ 3 files changed, 26 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 2cf2186aa45a..f93b02944631 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -38,10 +38,17 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
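
The hunk that follows threads ``session.posargs`` through to py.test,
which is how extra command-line arguments reach the test runner. A
hypothetical invocation (nox 0.x flags assumed), where everything after
``--`` is forwarded:

    $ nox -s "unit_tests(python_version='3.6')" -- -k test_key
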
- session.run('py.test', '--quiet', - '--cov=google.cloud.datastore', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.datastore', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 9824e06b73ad..a03bbe8b710f 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -148,22 +148,16 @@ def test_constructor_w_implicit_inputs(self): other = 'other' creds = _make_credentials() - default_called = [] - - def fallback_mock(project): - default_called.append(project) - return project or other klass = self._get_target_class() patch1 = mock.patch( 'google.cloud.datastore.client._determine_default_project', - new=fallback_mock) + return_value=other) patch2 = mock.patch( - 'google.cloud.client.get_credentials', - return_value=creds) + 'google.auth.default', return_value=(creds, None)) - with patch1: - with patch2: + with patch1 as _determine_default_project: + with patch2 as default: client = klass() self.assertEqual(client.project, other) @@ -174,7 +168,9 @@ def fallback_mock(project): self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) - self.assertEqual(default_called, [None]) + + default.assert_called_once_with() + _determine_default_project.assert_called_once_with(None) def test_constructor_w_explicit_inputs(self): from google.cloud.datastore.client import _DATASTORE_BASE_URL diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index b361ec25a42f..26c1b6cc0831 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -550,21 +550,14 @@ def _call_fut(self, iterator, entity_pb): return _item_to_entity(iterator, entity_pb) def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import helpers - - result = object() - entities = [] - - def mocked(entity_pb): - entities.append(entity_pb) - return result - - entity_pb = object() - with _Monkey(helpers, entity_from_protobuf=mocked): - self.assertIs(result, self._call_fut(None, entity_pb)) - - self.assertEqual(entities, [entity_pb]) + entity_pb = mock.sentinel.entity_pb + patch = mock.patch( + 'google.cloud.datastore.helpers.entity_from_protobuf') + with patch as entity_from_protobuf: + result = self._call_fut(None, entity_pb) + self.assertIs(result, entity_from_protobuf.return_value) + + entity_from_protobuf.assert_called_once_with(entity_pb) class Test__pb_from_query(unittest.TestCase): From 77d2c65c330f37c10e61e7aeab979a1007a41cfc Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Jul 2017 11:21:30 -0700 Subject: [PATCH 126/611] Remove httplib2, replace with Requests (#3674) * Core: remove httplib2, replace with Requests Additionally remove make_exception in favor of from_http_status and from_http_response. 
* Datastore: replace httplib2 with Requests * DNS: replace httplib2 with Requests * Error Reporting: replace httplib2 with requests * Language: replace httplib2 with Requests * Logging: replace httplib2 with requests * Monitoring: replace httplib2 with Requests * Pubsub: replace httplib2 with Requests * Resource Manager: replace httplib2 with Requests * Runtimeconfig: replace httplib2 with Requests * Speech: replace httplib2 with Requests * Storage: replace httplib2 with Requests * BigQuery: replace httplib2 with Requests * Translate: replace httplib2 with Requests * Vision: replace httplib2 with Requests --- .../google/cloud/datastore/_http.py | 21 +- .../google/cloud/datastore/client.py | 4 +- .../tests/system/test_system.py | 4 +- .../tests/unit/test__http.py | 248 +++++++++--------- 4 files changed, 134 insertions(+), 143 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 0723a97a0de4..de976f7e1bb3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -39,7 +39,7 @@ def _request(http, project, method, data, base_url): """Make a request over the Http transport to the Cloud Datastore API. - :type http: :class:`~httplib2.Http` + :type http: :class:`requests.Session` :param http: HTTP object to make requests. :type project: str @@ -63,27 +63,26 @@ def _request(http, project, method, data, base_url): """ headers = { 'Content-Type': 'application/x-protobuf', - 'Content-Length': str(len(data)), 'User-Agent': connection_module.DEFAULT_USER_AGENT, connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } api_url = build_api_url(project, method, base_url) - headers, content = http.request( - uri=api_url, method='POST', headers=headers, body=data) - status = headers['status'] - if status != '200': - error_status = status_pb2.Status.FromString(content) - raise exceptions.make_exception( - headers, error_status.message, use_json=False) + response = http.request( + url=api_url, method='POST', headers=headers, data=data) - return content + if response.status_code != 200: + error_status = status_pb2.Status.FromString(response.content) + raise exceptions.from_http_status( + response.status_code, error_status.message, errors=[error_status]) + + return response.content def _rpc(http, project, method, base_url, request_pb, response_pb_cls): """Make a protobuf RPC request. - :type http: :class:`~httplib2.Http` + :type http: :class:`requests.Session` :param http: HTTP object to make requests. :type project: str diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index af7d6d4f9113..0ccef9f5f8f0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -177,10 +177,10 @@ class Client(ClientWithProject): passed), falls back to the default inferred from the environment. - :type _http: :class:`~httplib2.Http` + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an + :meth:`requests.Session.request`. If not passed, an ``_http`` object is created that is bound to the ``credentials`` for the current object. 
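
The shape of the new transport call is easiest to see outside the diff.
A minimal sketch of what ``_request`` now does, with a plain
``requests.Session`` standing in for the client's authorized ``_http``
object (host and payload are illustrative placeholders):

    import requests

    http = requests.Session()
    response = http.request(
        url='https://datastore.googleapis.com/v1/projects/my-project:lookup',
        method='POST',
        headers={'Content-Type': 'application/x-protobuf'},
        data=b'...')                   # a serialized request protobuf
    if response.status_code != 200:
        # The real code maps this onto a google.cloud exception.
        raise ValueError(response.content)
    payload = response.content         # a serialized response protobuf
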
This parameter should be considered private, and could diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 129018748e08..b33f7de21925 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -16,7 +16,7 @@ import os import unittest -import httplib2 +import requests import six from google.cloud._helpers import UTC @@ -57,7 +57,7 @@ def setUpModule(): Config.CLIENT = datastore.Client(namespace=test_namespace) else: credentials = EmulatorCreds() - http = httplib2.Http() # Un-authorized. + http = requests.Session() # Un-authorized. Config.CLIENT = datastore.Client(project=emulator_dataset, namespace=test_namespace, credentials=credentials, diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index db364ec4dd61..c416cd36671a 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -15,6 +15,9 @@ import unittest import mock +from six.moves import http_client + +import requests class Test__request(unittest.TestCase): @@ -32,29 +35,25 @@ def test_success(self): project = 'PROJECT' method = 'METHOD' data = b'DATA' - uri = 'http://api-url' - - # Make mock HTTP object with canned response. + base_url = 'http://api-url' response_data = 'CONTENT' - http = Http({'status': '200'}, response_data) + + http = _make_requests_session([_make_response(content=response_data)]) # Call actual function under test. - response = self._call_fut(http, project, method, data, uri) + response = self._call_fut(http, project, method, data, base_url) self.assertEqual(response, response_data) # Check that the mocks were called as expected. - called_with = http._called_with - self.assertEqual(len(called_with), 4) - self.assertTrue(called_with['uri'].startswith(uri)) - self.assertEqual(called_with['method'], 'POST') + expected_url = _build_expected_url(base_url, project, method) expected_headers = { 'Content-Type': 'application/x-protobuf', 'User-Agent': connection_module.DEFAULT_USER_AGENT, - 'Content-Length': '4', connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } - self.assertEqual(called_with['headers'], expected_headers) - self.assertEqual(called_with['body'], data) + http.request.assert_called_once_with( + method='POST', url=expected_url, headers=expected_headers, + data=data) def test_failure(self): from google.cloud.exceptions import BadRequest @@ -66,17 +65,19 @@ def test_failure(self): data = 'DATA' uri = 'http://api-url' - # Make mock HTTP object with canned response. error = status_pb2.Status() error.message = 'Entity value is indexed.' error.code = code_pb2.FAILED_PRECONDITION - http = Http({'status': '400'}, error.SerializeToString()) - # Call actual function under test. + http = _make_requests_session([ + _make_response( + http_client.BAD_REQUEST, + content=error.SerializeToString()) + ]) + with self.assertRaises(BadRequest) as exc: self._call_fut(http, project, method, data, uri) - # Check that the mocks were called as expected. expected_message = '400 Entity value is indexed.' self.assertEqual(str(exc.exception), expected_message) @@ -147,7 +148,8 @@ def test_lookup_single_key_empty_response(self): read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. 
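
The failure test above leans on a detail of the protobuf-over-HTTP
protocol: a non-200 Datastore response body is itself a serialized
``google.rpc.Status``. Building one by hand, assuming the ``google.rpc``
protos from googleapis-common-protos:

    from google.rpc import code_pb2, status_pb2

    error = status_pb2.Status()
    error.message = 'Entity value is indexed.'
    error.code = code_pb2.FAILED_PRECONDITION
    body = error.SerializeToString()   # what a 400 response carries
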
- http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -161,10 +163,9 @@ def test_lookup_single_key_empty_response(self): self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb]) self.assertEqual(request.read_options, read_options) @@ -178,7 +179,8 @@ def test_lookup_single_key_empty_response_w_eventual(self): read_consistency=datastore_pb2.ReadOptions.EVENTUAL) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -192,10 +194,9 @@ def test_lookup_single_key_empty_response_w_eventual(self): self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb]) self.assertEqual(request.read_options, read_options) @@ -209,7 +210,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): read_options = datastore_pb2.ReadOptions(transaction=transaction) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -223,10 +225,9 @@ def test_lookup_single_key_empty_response_w_transaction(self): self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb]) self.assertEqual(request.read_options, read_options) @@ -243,7 +244,8 @@ def test_lookup_single_key_nonempty_response(self): read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. 
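
These lookup tests cycle through the three read-consistency variants the
v1 API supports, constructed exactly as in the tests above:

    from google.cloud.proto.datastore.v1 import datastore_pb2

    default_opts = datastore_pb2.ReadOptions()
    eventual_opts = datastore_pb2.ReadOptions(
        read_consistency=datastore_pb2.ReadOptions.EVENTUAL)
    txn_opts = datastore_pb2.ReadOptions(transaction=b'01234abcde')
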
- http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -260,10 +262,9 @@ def test_lookup_single_key_nonempty_response(self): found = response.found[0].entity self.assertEqual(found.key.path[0].kind, 'Kind') self.assertEqual(found.key.path[0].id, 1234) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb]) self.assertEqual(request.read_options, read_options) @@ -277,7 +278,8 @@ def test_lookup_multiple_keys_empty_response(self): read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -291,10 +293,9 @@ def test_lookup_multiple_keys_empty_response(self): self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb1, key_pb2]) self.assertEqual(request.read_options, read_options) @@ -312,7 +313,8 @@ def test_lookup_multiple_keys_w_missing(self): read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -327,17 +329,14 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual(len(response.deferred), 0) missing_keys = [result.entity.key for result in response.missing] self.assertEqual(missing_keys, [key_pb1, key_pb2]) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb1, key_pb2]) self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_deferred(self): from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud import _http as connection_module - from google.cloud.datastore._http import _CLIENT_INFO project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -348,7 +347,8 @@ def test_lookup_multiple_keys_w_deferred(self): read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. 
- http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -362,19 +362,9 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(list(response.deferred), [key_pb1, key_pb2]) - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - self.assertEqual(cw['uri'], uri) - self.assertEqual(cw['method'], 'POST') - expected_headers = { - 'Content-Type': 'application/x-protobuf', - 'User-Agent': connection_module.DEFAULT_USER_AGENT, - 'Content-Length': str(len(cw['body'])), - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - self.assertEqual(cw['headers'], expected_headers) - request = datastore_pb2.LookupRequest() - request.ParseFromString(cw['body']) + + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb1, key_pb2]) self.assertEqual(request.read_options, read_options) @@ -399,7 +389,8 @@ def test_run_query_w_eventual_no_transaction(self): ) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -410,11 +401,10 @@ def test_run_query_w_eventual_no_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'runQuery') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.RunQueryRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RunQueryRequest()) self.assertEqual(request.partition_id, partition_id) self.assertEqual(request.query, query_pb) self.assertEqual(request.read_options, read_options) @@ -440,7 +430,8 @@ def test_run_query_wo_eventual_w_transaction(self): ) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -451,11 +442,10 @@ def test_run_query_wo_eventual_w_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'runQuery') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.RunQueryRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RunQueryRequest()) self.assertEqual(request.partition_id, partition_id) self.assertEqual(request.query, query_pb) self.assertEqual(request.read_options, read_options) @@ -480,7 +470,8 @@ def test_run_query_wo_namespace_empty_result(self): ) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -491,11 +482,10 @@ def test_run_query_wo_namespace_empty_result(self): # Check the result and verify the callers. 
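
All of these rewritten tests lean on the same two helpers, defined near
the bottom of this module: a canned ``requests.Response`` and an
autospec'd session whose queued responses are returned in order. The
core of the pattern:

    import mock
    import requests

    response = requests.Response()
    response.status_code = 200
    response._content = b'serialized-protobuf'   # private, but fine in tests

    session = mock.create_autospec(requests.Session, instance=True)
    session.request.side_effect = [response]     # one queued reply
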
self.assertEqual(response, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'runQuery') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.RunQueryRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RunQueryRequest()) self.assertEqual(request.partition_id, partition_id) self.assertEqual(request.query, query_pb) self.assertEqual(request.read_options, read_options) @@ -523,7 +513,8 @@ def test_run_query_w_namespace_nonempty_result(self): ) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -534,11 +525,10 @@ def test_run_query_w_namespace_nonempty_result(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - cw = http._called_with + uri = _build_expected_url(client._base_url, project, 'runQuery') - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.RunQueryRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RunQueryRequest()) self.assertEqual(request.partition_id, partition_id) self.assertEqual(request.query, query_pb) @@ -551,7 +541,8 @@ def test_begin_transaction(self): rsp_pb.transaction = transaction # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -561,12 +552,11 @@ def test_begin_transaction(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) + uri = _build_expected_url( client._base_url, project, 'beginTransaction') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.BeginTransactionRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.BeginTransactionRequest()) # The RPC-over-HTTP request does not set the project in the request. self.assertEqual(request.project_id, u'') @@ -585,7 +575,8 @@ def test_commit_wo_transaction(self): value_pb.string_value = u'Foo' # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -597,11 +588,9 @@ def test_commit_wo_transaction(self): # Check the result and verify the callers. self.assertEqual(result, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'commit') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = rq_class() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call(http, uri, rq_class()) self.assertEqual(request.transaction, b'') self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) @@ -621,7 +610,8 @@ def test_commit_w_transaction(self): value_pb.string_value = u'Foo' # Create mock HTTP and client with response. 
- http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -633,11 +623,9 @@ def test_commit_w_transaction(self): # Check the result and verify the callers. self.assertEqual(result, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'commit') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = rq_class() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call(http, uri, rq_class()) self.assertEqual(request.transaction, b'xact') self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.TRANSACTIONAL) @@ -650,7 +638,8 @@ def test_rollback_ok(self): rsp_pb = datastore_pb2.RollbackResponse() # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -660,11 +649,10 @@ def test_rollback_ok(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) + uri = _build_expected_url(client._base_url, project, 'rollback') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.RollbackRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RollbackRequest()) self.assertEqual(request.transaction, transaction) def test_allocate_ids_empty(self): @@ -674,7 +662,8 @@ def test_allocate_ids_empty(self): rsp_pb = datastore_pb2.AllocateIdsResponse() # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -685,11 +674,10 @@ def test_allocate_ids_empty(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) self.assertEqual(list(response.keys), []) + uri = _build_expected_url(client._base_url, project, 'allocateIds') - cw = http._called_with - _verify_protobuf_call(self, cw, uri) - request = datastore_pb2.AllocateIdsRequest() - request.ParseFromString(cw['body']) + request = _verify_protobuf_call( + http, uri, datastore_pb2.AllocateIdsRequest()) self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): @@ -709,7 +697,8 @@ def test_allocate_ids_non_empty(self): rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) # Create mock HTTP and client with response. - http = Http({'status': '200'}, rsp_pb.SerializeToString()) + http = _make_requests_session( + [_make_response(content=rsp_pb.SerializeToString())]) client = mock.Mock( _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) @@ -720,29 +709,28 @@ def test_allocate_ids_non_empty(self): # Check the result and verify the callers. 
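
The rewritten ``_verify_protobuf_call`` helper (shown in full below) also
returns the parsed request, which is how assertions like the ones that
follow can inspect e.g. the keys that were sent. The trick is pulling the
``data`` kwarg back out of the mock:

    data = http.request.mock_calls[0][2]['data']   # kwargs of the one call
    request = datastore_pb2.AllocateIdsRequest()
    request.ParseFromString(data)                  # assert on request.keys
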
        self.assertEqual(list(response.keys), after_key_pbs)
        self.assertEqual(response, rsp_pb)
+
        uri = _build_expected_url(client._base_url, project, 'allocateIds')
-        cw = http._called_with
-        _verify_protobuf_call(self, cw, uri)
-        request = datastore_pb2.AllocateIdsRequest()
-        request.ParseFromString(cw['body'])
+        request = _verify_protobuf_call(
+            http, uri, datastore_pb2.AllocateIdsRequest())
        self.assertEqual(len(request.keys), len(before_key_pbs))
        for key_before, key_after in zip(before_key_pbs, request.keys):
            self.assertEqual(key_before, key_after)

-class Http(object):
+def _make_response(status=http_client.OK, content=b'', headers={}):
+    response = requests.Response()
+    response.status_code = status
+    response._content = content
+    response.headers = headers
+    response.request = requests.Request()
+    return response

-    _called_with = None

-    def __init__(self, headers, content):
-        from httplib2 import Response
-
-        self._response = Response(headers)
-        self._content = content
-
-    def request(self, **kw):
-        self._called_with = kw
-        return self._response, self._content
+def _make_requests_session(responses):
+    session = mock.create_autospec(requests.Session, instance=True)
+    session.request.side_effect = responses
+    return session


 def _build_expected_url(api_base_url, project, method):
@@ -765,16 +753,20 @@ def _make_key_pb(project, id_=1234):
     return Key(*path_args, project=project).to_protobuf()

-def _verify_protobuf_call(testcase, called_with, uri):
+def _verify_protobuf_call(http, expected_url, pb):
     from google.cloud import _http as connection_module
     from google.cloud.datastore._http import _CLIENT_INFO

-    testcase.assertEqual(called_with['uri'], uri)
-    testcase.assertEqual(called_with['method'], 'POST')
     expected_headers = {
         'Content-Type': 'application/x-protobuf',
         'User-Agent': connection_module.DEFAULT_USER_AGENT,
-        'Content-Length': str(len(called_with['body'])),
         connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO,
     }
-    testcase.assertEqual(called_with['headers'], expected_headers)
+
+    http.request.assert_called_once_with(
+        method='POST', url=expected_url, headers=expected_headers,
+        data=mock.ANY)
+
+    data = http.request.mock_calls[0][2]['data']
+    pb.ParseFromString(data)
+    return pb
From 32204f59994c44ef3b43f1f90457a69a003701ff Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 4 Aug 2017 16:45:43 -0700
Subject: [PATCH 127/611] Updating all affected packages after google-cloud-core update. (#3730)

* Updating all affected packages after google-cloud-core update.

* Moving 'pip install .' **after** subpackages in nox docs.

@lukesneeringer still hasn't explained why it was moved. In its
current location, the dependencies are first retrieved from PyPI
(which fails here for the unreleased versions), e.g.
https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/2716 --- packages/google-cloud-datastore/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 692dd109a481..675e58bcc22d 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-cloud-core >= 0.26.0, < 0.27dev', 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-datastore', - version='1.1.0', + version='1.2.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From 2b8aaa6affa7dfb6f7e0b3ce810b05953fd14093 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 8 Aug 2017 12:37:24 -0700 Subject: [PATCH 128/611] Make exclude_from_indexes a set, and public API. (#3756) --- .../google/cloud/datastore/entity.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index dc8a60b038be..e74d5aa640ee 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -129,8 +129,9 @@ class Entity(dict): def __init__(self, key=None, exclude_from_indexes=()): super(Entity, self).__init__() self.key = key - self._exclude_from_indexes = set(_ensure_tuple_or_list( + self.exclude_from_indexes = set(_ensure_tuple_or_list( 'exclude_from_indexes', exclude_from_indexes)) + """Names of fields which are *not* to be indexed for this entity.""" # NOTE: This will be populated when parsing a protobuf in # google.cloud.datastore.helpers.entity_from_protobuf. self._meanings = {} @@ -148,7 +149,7 @@ def __eq__(self, other): return False return (self.key == other.key and - self._exclude_from_indexes == other._exclude_from_indexes and + self.exclude_from_indexes == other.exclude_from_indexes and self._meanings == other._meanings and super(Entity, self).__eq__(other)) @@ -176,15 +177,6 @@ def kind(self): if self.key: return self.key.kind - @property - def exclude_from_indexes(self): - """Names of fields which are *not* to be indexed for this entity. - - :rtype: sequence of field names - :returns: The set of fields excluded from indexes. - """ - return frozenset(self._exclude_from_indexes) - def __repr__(self): if self.key: return '' % (self.key._flat_path, From a5c8d80f2eb8fb128bce0cb3889a0dfc496a386c Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 8 Aug 2017 14:50:31 -0700 Subject: [PATCH 129/611] Use latest/ directory for docs instead of stable/ (#3766) See also https://github.com/GoogleCloudPlatform/google-cloud-python/pull/3763 $ sed -i '' 's/googlecloudplatform.github.io\/google-cloud-python\/stable\//googlecloudplatform.github.io\/google-cloud-python\/latest\//g' **/*.rst --- packages/google-cloud-datastore/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 89ba561baed3..dd2fc68cc217 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Datastore - `Documentation`_ -.. 
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/client.html Quick Start ----------- @@ -44,7 +44,7 @@ queries, and eventual consistency for all other queries. See the ``google-cloud-python`` API `datastore documentation`_ to learn how to interact with the Cloud Datastore using this Client Library. -.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html +.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/client.html See the `official Google Cloud Datastore documentation`_ for more details on how to activate Cloud Datastore for your project. From 546d7ff738685c3cd35d47579cb8f6f4728a471d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 8 Aug 2017 14:51:50 -0700 Subject: [PATCH 130/611] Fix __eq__ and __ne__. (#3765) --- .../google-cloud-datastore/google/cloud/datastore/entity.py | 4 ++-- .../google-cloud-datastore/google/cloud/datastore/helpers.py | 4 ++-- packages/google-cloud-datastore/google/cloud/datastore/key.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index e74d5aa640ee..be30aa915172 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -146,7 +146,7 @@ def __eq__(self, other): :returns: True if the entities compare equal, else False. """ if not isinstance(other, Entity): - return False + return NotImplemented return (self.key == other.key and self.exclude_from_indexes == other.exclude_from_indexes and @@ -162,7 +162,7 @@ def __ne__(self, other): :rtype: bool :returns: False if the entities compare equal, else True. """ - return not self.__eq__(other) + return not self == other @property def kind(self): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index ee4537317030..eeae9d427572 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -454,7 +454,7 @@ def __eq__(self, other): :returns: True if the points compare equal, else False. """ if not isinstance(other, GeoPoint): - return False + return NotImplemented return (self.latitude == other.latitude and self.longitude == other.longitude) @@ -465,4 +465,4 @@ def __ne__(self, other): :rtype: bool :returns: False if the points compare equal, else True. """ - return not self.__eq__(other) + return not self == other diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index f1733f8f5d8e..615cab696568 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -123,7 +123,7 @@ def __eq__(self, other): :returns: True if the keys compare equal, else False. """ if not isinstance(other, Key): - return False + return NotImplemented if self.is_partial or other.is_partial: return False @@ -143,7 +143,7 @@ def __ne__(self, other): :rtype: bool :returns: False if the keys compare equal, else True. 
""" - return not self.__eq__(other) + return not self == other def __hash__(self): """Hash a keys for use in a dictionary lookp. From eca2c46c74cd56ffd56cf65ab5a8d7e52a1bd9bf Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 9 Aug 2017 10:02:05 -0700 Subject: [PATCH 131/611] Move google.cloud.iterator to google.api.core.page_iterator (#3770) * Move google.cloud.iterator to google.api.core.page_iterator * Re-write tests to pytest style. * Make GAXIterator private- it will soon be removed. * Pass api_request into HTTPIterator to avoid accessing private members * BigQuery: use google.api.core.page_iterator * DNS: use google.api.core.page_iterator * Logging: use google.api.core.page_iterator * PubSub: use google.api.core.page_iterator * Resource manager: use google.api.core.page_iterator * Runtimeconfig: use google.api.core.page_iterator * logging: use google.api.core._GAXIterator * Storage: use google.api.core.page_iterator * Pubsub: use google.api.core._GAXIterator * Trace: use google.api.core._GAXIterator * Spanner: use google.api.core._GAXIterator --- .../google/cloud/datastore/query.py | 9 ++++----- packages/google-cloud-datastore/tests/unit/test_query.py | 4 ++-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 2ab65064f85e..d0c9cea9f711 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -16,9 +16,8 @@ import base64 +from google.api.core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list -from google.cloud.iterator import Iterator as BaseIterator -from google.cloud.iterator import Page from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 @@ -373,7 +372,7 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, start_cursor=start_cursor, end_cursor=end_cursor) -class Iterator(BaseIterator): +class Iterator(page_iterator.Iterator): """Represent the state of a given execution of a Query. :type query: :class:`~google.cloud.datastore.query.Query` @@ -499,7 +498,7 @@ def _next_page(self): query=query_pb, ) entity_pbs = self._process_query_results(response_pb) - return Page(self, entity_pbs, self._item_to_value) + return page_iterator.Page(self, entity_pbs, self._item_to_value) def _pb_from_query(query): @@ -571,7 +570,7 @@ def _pb_from_query(query): def _item_to_entity(iterator, entity_pb): """Convert a raw protobuf entity to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. 
:type entity_pb: diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 26c1b6cc0831..d8d08430dab9 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -488,7 +488,7 @@ def test__process_query_results_bad_enum(self): iterator._process_query_results(response_pb) def _next_page_helper(self, txn_id=None): - from google.cloud.iterator import Page + from google.api.core import page_iterator from google.cloud.proto.datastore.v1 import datastore_pb2 from google.cloud.proto.datastore.v1 import entity_pb2 from google.cloud.proto.datastore.v1 import query_pb2 @@ -509,7 +509,7 @@ def _next_page_helper(self, txn_id=None): iterator = self._make_one(query, client) page = iterator._next_page() - self.assertIsInstance(page, Page) + self.assertIsInstance(page, page_iterator.Page) self.assertIs(page._parent, iterator) partition_id = entity_pb2.PartitionId(project_id=project) From b23760c3601cf24efefa0330229e4f3008638265 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 10 Aug 2017 09:30:51 -0700 Subject: [PATCH 132/611] Document to_legacy_urlsafe as being equivalent to ndb. (#3783) --- .../google-cloud-datastore/google/cloud/datastore/key.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 615cab696568..03bcb303b594 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -307,6 +307,11 @@ def to_legacy_urlsafe(self): argument to ``ndb.Key(urlsafe=...)``. The base64 encoded values will have padding removed. + .. note:: + + The string returned by ``to_legacy_urlsafe`` is equivalent, but + not identical, to the string returned by ``ndb``. + :rtype: bytes :returns: A bytestring containing the key encoded as URL-safe base64. """ From 9729254ea871e06260360f67675f39195f86fa6b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 11 Aug 2017 10:42:13 -0700 Subject: [PATCH 133/611] Make Datastore doctests use a namespace. (#3793) --- .../google/cloud/datastore/client.py | 66 ++++++++------ .../google/cloud/datastore/entity.py | 28 +++--- .../google/cloud/datastore/transaction.py | 90 +++++++++++-------- 3 files changed, 104 insertions(+), 80 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 0ccef9f5f8f0..71144e1e3aa2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -504,56 +504,64 @@ def query(self, **kwargs): .. testsetup:: query - from google.cloud import datastore + import os + import uuid - client = datastore.Client() - query = client.query(kind='_Doctest') + from google.cloud import datastore - def do_something(entity): - pass + unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + client = datastore.Client(namespace='ns{}'.format(unique)) + query = client.query(kind='_Doctest') + + def do_something(entity): + pass .. doctest:: query - >>> query = client.query(kind='MyKind') - >>> query.add_filter('property', '=', 'val') + >>> query = client.query(kind='MyKind') + >>> query.add_filter('property', '=', 'val') Using the query iterator .. 
doctest:: query - >>> query_iter = query.fetch() - >>> for entity in query_iter: - ... do_something(entity) + >>> query_iter = query.fetch() + >>> for entity in query_iter: + ... do_something(entity) or manually page through results .. testsetup:: query-page - from google.cloud import datastore - from tests.system.test_system import Config # system tests + import os + import uuid + + from google.cloud import datastore + from tests.system.test_system import Config # system tests - client = datastore.Client() + unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + client = datastore.Client(namespace='ns{}'.format(unique)) - key = client.key('_Doctest') - entity1 = datastore.Entity(key=key) - entity1['foo'] = 1337 - entity2 = datastore.Entity(key=key) - entity2['foo'] = 42 - Config.TO_DELETE.extend([entity1, entity2]) - client.put_multi([entity1, entity2]) + key = client.key('_Doctest') + entity1 = datastore.Entity(key=key) + entity1['foo'] = 1337 + entity2 = datastore.Entity(key=key) + entity2['foo'] = 42 + Config.TO_DELETE.extend([entity1, entity2]) + client.put_multi([entity1, entity2]) - query = client.query(kind='_Doctest') - cursor = None + query = client.query(kind='_Doctest') + cursor = None .. doctest:: query-page - >>> query_iter = query.fetch(start_cursor=cursor) - >>> pages = query_iter.pages - >>> - >>> first_page = next(pages) - >>> first_page_entities = list(first_page) - >>> query_iter.next_page_token - b'...' + >>> query_iter = query.fetch(start_cursor=cursor) + >>> pages = query_iter.pages + >>> + >>> first_page = next(pages) + >>> first_page_entities = list(first_page) + >>> query_iter.next_page_token + b'...' :type kwargs: dict :param kwargs: Parameters for initializing and instance of diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index be30aa915172..bf3b99be2066 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -42,29 +42,33 @@ class Entity(dict): .. testsetup:: entity-ctor - from google.cloud import datastore - from tests.system.test_system import Config # system tests + import os + import uuid + + from google.cloud import datastore + from tests.system.test_system import Config # system tests - client = datastore.Client() - key = client.key('EntityKind', 1234, namespace='_Doctest') - entity = datastore.Entity(key=key) - entity['property'] = 'value' - Config.TO_DELETE.append(entity) + unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + client = datastore.Client(namespace='ns{}'.format(unique)) + key = client.key('EntityKind', 1234, namespace='_Doctest') + entity = datastore.Entity(key=key) + entity['property'] = 'value' + Config.TO_DELETE.append(entity) - client.put(entity) + client.put(entity) .. doctest:: entity-ctor - >>> client.get(key) - + >>> client.get(key) + You can the set values on the entity just like you would on any other dictionary. .. doctest:: entity-ctor - >>> entity['age'] = 20 - >>> entity['name'] = 'JJ' + >>> entity['age'] = 20 + >>> entity['name'] = 'JJ' However, not all types are allowed as a value for a Google Cloud Datastore entity. 
The following basic types are supported by the API: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 6108bd80647a..9b755f2210eb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -29,24 +29,28 @@ class Transaction(Batch): .. testsetup:: txn-put-multi, txn-api - from google.cloud import datastore - from tests.system.test_system import Config # system tests + import os + import uuid - client = datastore.Client() - key1 = client.key('_Doctest') - entity1 = datastore.Entity(key=key1) - entity1['foo'] = 1337 + from google.cloud import datastore + from tests.system.test_system import Config # system tests - key2 = client.key('_Doctest', 'abcd1234') - entity2 = datastore.Entity(key=key2) - entity2['foo'] = 42 + unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + client = datastore.Client(namespace='ns{}'.format(unique)) + key1 = client.key('_Doctest') + entity1 = datastore.Entity(key=key1) + entity1['foo'] = 1337 - Config.TO_DELETE.extend([entity1, entity2]) + key2 = client.key('_Doctest', 'abcd1234') + entity2 = datastore.Entity(key=key2) + entity2['foo'] = 42 + + Config.TO_DELETE.extend([entity1, entity2]) .. doctest:: txn-put-multi - >>> with client.transaction(): - ... client.put_multi([entity1, entity2]) + >>> with client.transaction(): + ... client.put_multi([entity1, entity2]) Because it derives from :class:`~google.cloud.datastore.batch.Batch`, :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods: @@ -62,51 +66,59 @@ class Transaction(Batch): .. testsetup:: txn-error - from google.cloud import datastore + import os + import uuid + + from google.cloud import datastore - client = datastore.Client() + unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + client = datastore.Client(namespace='ns{}'.format(unique)) - def do_some_work(): - return + def do_some_work(): + return - class SomeException(Exception): - pass + class SomeException(Exception): + pass .. doctest:: txn-error - >>> with client.transaction(): - ... do_some_work() - ... raise SomeException # rolls back - Traceback (most recent call last): - ... - SomeException + >>> with client.transaction(): + ... do_some_work() + ... raise SomeException # rolls back + Traceback (most recent call last): + ... + SomeException If the transaction block exits without an exception, it will commit by default. .. warning:: - Inside a transaction, automatically assigned IDs for - entities will not be available at save time! That means, if you - try: + Inside a transaction, automatically assigned IDs for + entities will not be available at save time! That means, if you + try: + + .. testsetup:: txn-entity-key, txn-entity-key-after, txn-manual - .. 
testsetup:: txn-entity-key, txn-entity-key-after, txn-manual + import os + import uuid - from google.cloud import datastore - from tests.system.test_system import Config # system tests + from google.cloud import datastore + from tests.system.test_system import Config # system tests - client = datastore.Client() + unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + client = datastore.Client(namespace='ns{}'.format(unique)) - def Entity(*args, **kwargs): - entity = datastore.Entity(*args, **kwargs) - Config.TO_DELETE.append(entity) - return entity + def Entity(*args, **kwargs): + entity = datastore.Entity(*args, **kwargs) + Config.TO_DELETE.append(entity) + return entity - .. doctest:: txn-entity-key + .. doctest:: txn-entity-key - >>> with client.transaction(): - ... entity = Entity(key=client.key('Thing')) - ... client.put(entity) + >>> with client.transaction(): + ... entity = Entity(key=client.key('Thing')) + ... client.put(entity) ``entity`` won't have a complete key until the transaction is committed. From b6e2c16faaad962abb13ffef53ef5d8e23a62b5e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 13:28:07 -0700 Subject: [PATCH 134/611] Bump core version number (#3864) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 675e58bcc22d..8be9eef62b2c 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.26.0, < 0.27dev', + 'google-cloud-core >= 0.27.0, < 0.28dev', 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] From 4e6931af9b2fc895117765ba609b15d73aa47e06 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 14:28:31 -0700 Subject: [PATCH 135/611] Bump Datastore to 1.3.0 (#3868) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 8be9eef62b2c..e8339369b18c 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -58,7 +58,7 @@ setup( name='google-cloud-datastore', - version='1.2.0', + version='1.3.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From bc2382f55d28e48334424e1cb83741606abd2d37 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 8 Sep 2017 08:47:53 -0700 Subject: [PATCH 136/611] Allowing `dict` (as an `Entity`) for property values. (#3927) * Allowing `dict` (as an `Entity`) for property values. Fixes #3923. * Using explicit unicode / bytes in new dict->Entity unit tests. 
--- .../google/cloud/datastore/entity.py | 4 +- .../google/cloud/datastore/helpers.py | 13 ++- .../tests/unit/test_helpers.py | 103 ++++++++++++++++-- 3 files changed, 106 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index bf3b99be2066..c8213f0ebcbb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -83,10 +83,12 @@ class Entity(dict): * :class:`~google.cloud.datastore.helpers.GeoPoint` * :data:`None` - In addition, two container types are supported: + In addition, three container types are supported: * :class:`list` * :class:`~google.cloud.datastore.entity.Entity` + * :class:`dict` (will just be treated like an ``Entity`` without + a key or ``exclude_from_indexes``) Each entry in a list must be one of the value types (basic or container) and each value in an diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index eeae9d427572..f07dacdbe5d5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -290,8 +290,11 @@ def _pb_attr_value(val): >>> _pb_attr_value('my_string') ('string_value', 'my_string') - :type val: `datetime.datetime`, :class:`google.cloud.datastore.key.Key`, - bool, float, integer, string + :type val: + :class:`datetime.datetime`, :class:`google.cloud.datastore.key.Key`, + bool, float, integer, bytes, str, unicode, + :class:`google.cloud.datastore.entity.Entity`, dict, list, + :class:`google.cloud.datastore.helpers.GeoPoint`, NoneType :param val: The value to be scrutinized. 
:rtype: tuple @@ -315,6 +318,10 @@ def _pb_attr_value(val): name, value = 'blob', val elif isinstance(val, Entity): name, value = 'entity', val + elif isinstance(val, dict): + entity_val = Entity(key=None) + entity_val.update(val) + name, value = 'entity', entity_val elif isinstance(val, list): name, value = 'array', val elif isinstance(val, GeoPoint): @@ -322,7 +329,7 @@ def _pb_attr_value(val): elif val is None: name, value = 'null', struct_pb2.NULL_VALUE else: - raise ValueError("Unknown protobuf attr type %s" % type(val)) + raise ValueError('Unknown protobuf attr type', type(val)) return name + '_value', value diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 18ff98e64781..693536e04feb 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -199,7 +199,7 @@ def _call_fut(self, entity): return entity_to_protobuf(entity) - def _compareEntityProto(self, entity_pb1, entity_pb2): + def _compare_entity_proto(self, entity_pb1, entity_pb2): from google.cloud.datastore.helpers import _property_tuples self.assertEqual(entity_pb1.key, entity_pb2.key) @@ -212,8 +212,8 @@ def _compareEntityProto(self, entity_pb1, entity_pb2): self.assertEqual(name1, name2) if val1.HasField('entity_value'): # Message field (Entity) self.assertEqual(val1.meaning, val2.meaning) - self._compareEntityProto(val1.entity_value, - val2.entity_value) + self._compare_entity_proto( + val1.entity_value, val2.entity_value) else: self.assertEqual(val1, val2) @@ -223,7 +223,7 @@ def test_empty(self): entity = Entity() entity_pb = self._call_fut(entity) - self._compareEntityProto(entity_pb, entity_pb2.Entity()) + self._compare_entity_proto(entity_pb, entity_pb2.Entity()) def test_key_only(self): from google.cloud.proto.datastore.v1 import entity_pb2 @@ -242,7 +242,7 @@ def test_key_only(self): path_elt.kind = kind path_elt.name = name - self._compareEntityProto(entity_pb, expected_pb) + self._compare_entity_proto(entity_pb, expected_pb) def test_simple_fields(self): from google.cloud.proto.datastore.v1 import entity_pb2 @@ -262,7 +262,7 @@ def test_simple_fields(self): val_pb2 = _new_value_pb(expected_pb, name2) val_pb2.string_value = value2 - self._compareEntityProto(entity_pb, expected_pb) + self._compare_entity_proto(entity_pb, expected_pb) def test_with_empty_list(self): from google.cloud.proto.datastore.v1 import entity_pb2 @@ -272,7 +272,7 @@ def test_with_empty_list(self): entity['foo'] = [] entity_pb = self._call_fut(entity) - self._compareEntityProto(entity_pb, entity_pb2.Entity()) + self._compare_entity_proto(entity_pb, entity_pb2.Entity()) def test_inverts_to_protobuf(self): from google.cloud.proto.datastore.v1 import entity_pb2 @@ -325,7 +325,7 @@ def test_inverts_to_protobuf(self): # NOTE: entity_to_protobuf() strips the project so we "cheat". new_pb.key.partition_id.project_id = project - self._compareEntityProto(original_pb, new_pb) + self._compare_entity_proto(original_pb, new_pb) def test_meaning_with_change(self): from google.cloud.proto.datastore.v1 import entity_pb2 @@ -343,7 +343,7 @@ def test_meaning_with_change(self): value_pb.integer_value = value # NOTE: No meaning is used since the value differs from the # value stored. 
- self._compareEntityProto(entity_pb, expected_pb) + self._compare_entity_proto(entity_pb, expected_pb) def test_variable_meanings(self): from google.cloud.proto.datastore.v1 import entity_pb2 @@ -369,7 +369,78 @@ def test_variable_meanings(self): value2 = value_pb.array_value.values.add() value2.integer_value = values[2] - self._compareEntityProto(entity_pb, expected_pb) + self._compare_entity_proto(entity_pb, expected_pb) + + def test_dict_to_entity(self): + from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore.entity import Entity + + entity = Entity() + entity['a'] = {'b': u'c'} + entity_pb = self._call_fut(entity) + + expected_pb = entity_pb2.Entity( + properties={ + 'a': entity_pb2.Value( + entity_value=entity_pb2.Entity( + properties={ + 'b': entity_pb2.Value( + string_value='c', + ), + }, + ), + ), + }, + ) + self.assertEqual(entity_pb, expected_pb) + + def test_dict_to_entity_recursive(self): + from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore.entity import Entity + + entity = Entity() + entity['a'] = { + 'b': { + 'c': { + 'd': 1.25, + }, + 'e': True, + }, + 'f': 10, + } + entity_pb = self._call_fut(entity) + + b_entity_pb = entity_pb2.Entity( + properties={ + 'c': entity_pb2.Value( + entity_value=entity_pb2.Entity( + properties={ + 'd': entity_pb2.Value( + double_value=1.25, + ), + }, + ), + ), + 'e': entity_pb2.Value(boolean_value=True), + } + ) + expected_pb = entity_pb2.Entity( + properties={ + 'a': entity_pb2.Value( + entity_value=entity_pb2.Entity( + properties={ + 'b': entity_pb2.Value( + entity_value=b_entity_pb, + ), + 'f': entity_pb2.Value( + integer_value=10, + ), + }, + ), + ), + }, + ) + self.assertEqual(entity_pb, expected_pb) class Test_key_from_protobuf(unittest.TestCase): @@ -516,6 +587,18 @@ def test_entity(self): self.assertEqual(name, 'entity_value') self.assertIs(value, entity) + def test_dict(self): + from google.cloud.datastore.entity import Entity + + orig_value = {'richard': b'feynman'} + name, value = self._call_fut(orig_value) + self.assertEqual(name, 'entity_value') + self.assertIsInstance(value, Entity) + self.assertIsNone(value.key) + self.assertEqual(value._meanings, {}) + self.assertEqual(value.exclude_from_indexes, set()) + self.assertEqual(dict(value), orig_value) + def test_array(self): values = ['a', 0, 3.14] name, value = self._call_fut(values) From 786429d1bebdcd45a3bb8e01b0b320f63313dd3d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 3 Oct 2017 13:02:49 -0700 Subject: [PATCH 137/611] Fixing virutal->virtual typo. (#4108) Done via: $ git grep -l virutal | xargs sed -i s/virutal/virtual/g --- packages/google-cloud-datastore/nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index f93b02944631..505b028f7e5e 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -68,7 +68,7 @@ def system_tests(session, python_version): session.virtualenv_dirname = 'sys-' + python_version # Install all test dependencies, then install this package into the - # virutalenv's dist-packages. + # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) session.install('../test_utils/') session.install('.') @@ -92,7 +92,7 @@ def doctests(session): session.interpreter = 'python3.6' # Install all test dependencies, then install this package into the - # virutalenv's dist-packages. + # virtualenv's dist-packages. 
session.install('mock', 'pytest', 'sphinx', *LOCAL_DEPS) session.install('../test_utils/') session.install('.') From a4e5d7aa29e92cf292d1a46fe80f97fb06ad45ed Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 12 Oct 2017 17:13:19 -0700 Subject: [PATCH 138/611] s/gcloud-common/google-cloud-common/g (#4180) The gcloud-common repo moved to https://github.com/GoogleCloudPlatform/google-cloud-common --- packages/google-cloud-datastore/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index dd2fc68cc217..d75de04c7fd7 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -27,7 +27,7 @@ learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. .. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html -.. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication +.. _authentication document: https://github.com/GoogleCloudPlatform/google-cloud-common/tree/master/authentication Using the API ------------- From b281e22075627d78bea848e76283eea8f41449df Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Fri, 13 Oct 2017 13:46:24 -0700 Subject: [PATCH 139/611] Update Docs with Python Setup Guide (#4187) --- packages/google-cloud-datastore/README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index d75de04c7fd7..725671f35dc8 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -18,6 +18,10 @@ Quick Start $ pip install --upgrade google-cloud-datastore +Fore more information on setting up your Python development environment, such as installing ``pip`` and on your system, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform. + +.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup + Authentication -------------- From b1f270f3e5d2023c06e4068b59ef86a1764d3d15 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 18 Oct 2017 15:36:57 -0700 Subject: [PATCH 140/611] Replace usage of google.api.core with google.api_core (#4221) * Remove api.core packages from google.cloud.core, make google.cloud.core depend on api_core. * s/google.api.core/google.api_core/g and nox updates * Fixing core tests, addressing review feedback * Fix bigquery --- .../google-cloud-datastore/google/cloud/datastore/query.py | 4 ++-- packages/google-cloud-datastore/nox.py | 5 ++++- packages/google-cloud-datastore/tests/unit/test_query.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index d0c9cea9f711..5cf87d2f092e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -16,7 +16,7 @@ import base64 -from google.api.core import page_iterator +from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 @@ -570,7 +570,7 @@ def _pb_from_query(query): def _item_to_entity(iterator, entity_pb): """Convert a raw protobuf entity to the native object. 
-    :type iterator: :class:`~google.api.core.page_iterator.Iterator`
+    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
     :param iterator: The iterator that is currently in use.

     :type entity_pb:
diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py
index 505b028f7e5e..148071b429c3 100644
--- a/packages/google-cloud-datastore/nox.py
+++ b/packages/google-cloud-datastore/nox.py
@@ -19,7 +19,10 @@
 import nox

-LOCAL_DEPS = ('../core/',)
+LOCAL_DEPS = (
+    os.path.join('..', 'api_core'),
+    os.path.join('..', 'core'),
+)


 @nox.session
diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py
index d8d08430dab9..e8a9834ca1d4 100644
--- a/packages/google-cloud-datastore/tests/unit/test_query.py
+++ b/packages/google-cloud-datastore/tests/unit/test_query.py
@@ -488,7 +488,7 @@ def test__process_query_results_bad_enum(self):
             iterator._process_query_results(response_pb)

     def _next_page_helper(self, txn_id=None):
-        from google.api.core import page_iterator
+        from google.api_core import page_iterator
         from google.cloud.proto.datastore.v1 import datastore_pb2
         from google.cloud.proto.datastore.v1 import entity_pb2
         from google.cloud.proto.datastore.v1 import query_pb2

From 44bbef8c250a034017ad09f35a7d81b3014bcaca Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Mon, 30 Oct 2017 14:41:42 -0700
Subject: [PATCH 141/611] Cutting version 0.28.0 of `google-cloud-core`.
 (#4280)

Also
- updating all dependencies of `grpcio` to `>= 1.7.0`. This was due
  to an issue [1] with `1.6.0`.
- updating the version of `google-api-core` (also to be released; this
  is required since the bounds on `grpcio` of
  `google-cloud-core==0.28.0` and `google-api-core==0.1.0` are
  mutually exclusive.)
- Updating `google-api-core` CHANGELOG for release.
- Updating packages to depend on `google-cloud-core>=0.28.0`.
- Installing `nox -s lint` deps locally for vision.
[1]: https://github.com/grpc/grpc/issues/12455 --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index e8339369b18c..2190bc351b71 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.27.0, < 0.28dev', + 'google-cloud-core >= 0.28.0, < 0.29dev', 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] From 492468a2fe1a8bbf5d351ffc06117963fa9e4ba0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 31 Oct 2017 08:57:09 -0700 Subject: [PATCH 142/611] Switch copyright holder to "Google LLC" (#4287) --- packages/google-cloud-datastore/google/__init__.py | 2 +- packages/google-cloud-datastore/google/cloud/__init__.py | 2 +- .../google-cloud-datastore/google/cloud/datastore/__init__.py | 2 +- .../google/cloud/datastore/_app_engine_key.proto | 2 +- packages/google-cloud-datastore/google/cloud/datastore/_gax.py | 2 +- packages/google-cloud-datastore/google/cloud/datastore/_http.py | 2 +- packages/google-cloud-datastore/google/cloud/datastore/batch.py | 2 +- .../google-cloud-datastore/google/cloud/datastore/client.py | 2 +- .../google-cloud-datastore/google/cloud/datastore/entity.py | 2 +- .../google-cloud-datastore/google/cloud/datastore/helpers.py | 2 +- packages/google-cloud-datastore/google/cloud/datastore/key.py | 2 +- packages/google-cloud-datastore/google/cloud/datastore/query.py | 2 +- .../google/cloud/datastore/transaction.py | 2 +- packages/google-cloud-datastore/nox.py | 2 +- packages/google-cloud-datastore/pylint.config.py | 2 +- packages/google-cloud-datastore/setup.py | 2 +- packages/google-cloud-datastore/tests/doctests.py | 2 +- packages/google-cloud-datastore/tests/system/test_system.py | 2 +- .../tests/system/utils/clear_datastore.py | 2 +- .../tests/system/utils/populate_datastore.py | 2 +- packages/google-cloud-datastore/tests/unit/__init__.py | 2 +- packages/google-cloud-datastore/tests/unit/test__gax.py | 2 +- packages/google-cloud-datastore/tests/unit/test__http.py | 2 +- packages/google-cloud-datastore/tests/unit/test_batch.py | 2 +- packages/google-cloud-datastore/tests/unit/test_client.py | 2 +- packages/google-cloud-datastore/tests/unit/test_entity.py | 2 +- packages/google-cloud-datastore/tests/unit/test_helpers.py | 2 +- packages/google-cloud-datastore/tests/unit/test_key.py | 2 +- packages/google-cloud-datastore/tests/unit/test_query.py | 2 +- packages/google-cloud-datastore/tests/unit/test_transaction.py | 2 +- 30 files changed, 30 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-datastore/google/__init__.py b/packages/google-cloud-datastore/google/__init__.py index b2b833373882..9ee9bf4342ab 100644 --- a/packages/google-cloud-datastore/google/__init__.py +++ b/packages/google-cloud-datastore/google/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/__init__.py b/packages/google-cloud-datastore/google/cloud/__init__.py index b2b833373882..9ee9bf4342ab 100644 --- a/packages/google-cloud-datastore/google/cloud/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. 
+# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 1cdc5db07ba3..12ff017e6e40 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto index 7248f1a4e4ef..ea6a25b22eea 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index e1d0e57a7737..2d3e7459f6dd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. +# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index de976f7e1bb3..02bce52b730e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 30a8aa4c67f1..5dd7a3146e7c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 71144e1e3aa2..4a4228a6b7f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index c8213f0ebcbb..649da274aba3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index f07dacdbe5d5..056376965725 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 03bcb303b594..f2581b48714c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5cf87d2f092e..477eccb04395 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 9b755f2210eb..b10aa2a2e64e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 148071b429c3..8ba58e1f4ddd 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/pylint.config.py b/packages/google-cloud-datastore/pylint.config.py index b618319b8b61..5d64b9d2f256 100644 --- a/packages/google-cloud-datastore/pylint.config.py +++ b/packages/google-cloud-datastore/pylint.config.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. 
+# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 2190bc351b71..7a30edc0c530 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/doctests.py b/packages/google-cloud-datastore/tests/doctests.py index 5264635af03c..2f93bae29d54 100644 --- a/packages/google-cloud-datastore/tests/doctests.py +++ b/packages/google-cloud-datastore/tests/doctests.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index b33f7de21925..a5e46c6dd8da 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index 9dc24a49dc28..fb8d50b2ade6 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index bb43ae315473..27a31caf1337 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py index 58e0d9153632..df379f1e9d88 100644 --- a/packages/google-cloud-datastore/tests/unit/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/tests/unit/test__gax.py b/packages/google-cloud-datastore/tests/unit/test__gax.py index 2dd7f8d0e3d5..f81d709c6b65 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gax.py +++ b/packages/google-cloud-datastore/tests/unit/test__gax.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. +# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index c416cd36671a..e840b649cdd1 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index df01a0ce2c7a..01262ae17ba6 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index a03bbe8b710f..6477f53c5fa7 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py index 4a04ac259577..f018a89fc7ea 100644 --- a/packages/google-cloud-datastore/tests/unit/test_entity.py +++ b/packages/google-cloud-datastore/tests/unit/test_entity.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 693536e04feb..be4855d5e48c 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index 4fb7b89911b2..3f82412e27cd 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index e8a9834ca1d4..111a2ceed0bf 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index a9a4194c7dca..5479fbf80812 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2014 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 125981b1dc538146f855f295414a585aaf6ca0ec Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 14:28:55 -0700 Subject: [PATCH 143/611] Making release for most packages. (#4296) * Making release for most packages. Every package except those that have already been released (`google-cloud-core`, `google-api-core`, `google-cloud-bigquery`): - `google-cloud` - `google-cloud-bigtable` - `google-cloud-datastore` - `google-cloud-dns` - `google-cloud-error-reporting` - `google-cloud-firestore` - `google-cloud-language` - `google-cloud-logging` - `google-cloud-monitoring` - `google-cloud-resource-manager` - `google-cloud-runtimeconfig` - `google-cloud-spanner` - `google-cloud-speech` - `google-cloud-storage` - `google-cloud-trace` - `google-cloud-translate` - `google-cloud-videointelligence` - `google-cloud-vision` * Adding changelog files for each package. --- packages/google-cloud-datastore/CHANGELOG.md | 23 ++++++++++++++++++++ packages/google-cloud-datastore/setup.py | 5 +++-- 2 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-datastore/CHANGELOG.md diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md new file mode 100644 index 000000000000..08c6d71efd4a --- /dev/null +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -0,0 +1,23 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-datastore/#history + +## 1.4.0 + +### Interface changes / additions + +- Allowing `dict` (as an `Entity`) for property values. 
(#3927) + +### Documentation + +- Added link to "Python Development Environment Setup Guide" in + project README (#4187, h/t to @michaelawyu) + +### Dependencies + +- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency + on `google-api-core` (#4221, #4280) + +PyPI: https://pypi.org/project/google-cloud-datastore/1.4.0/ diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 7a30edc0c530..057e27a43d89 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -52,13 +52,14 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.28.0, < 0.29dev', - 'google-gax>=0.15.7, <0.16dev', + 'google-api-core >= 0.1.1, < 0.2.0dev', + 'google-gax >= 0.15.7, < 0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-datastore', - version='1.3.0', + version='1.4.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From eba78cc303ee49f4896ccdd95e72e46f04b7a730 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 15:43:51 -0700 Subject: [PATCH 144/611] Marking all remaining versions as "dev". (#4299) This is to make it clear the code is between releases. Any code that relies on a **new** feature (e.g. of `google-api-core`) will then be able to **explicitly** make this clear by using the lower bound of the `devN` version. Fixes #4208. See: https://snarky.ca/how-i-manage-package-version-numbers/ --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 057e27a43d89..92588ba2de0c 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-datastore', - version='1.4.0', + version='1.4.1.dev1', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From a47f9cc005c93bf3d36059610301e65a343b7856 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 1 Nov 2017 12:43:23 -0700 Subject: [PATCH 145/611] Fixing "Fore" -> "For" typo in README docs. (#4317) Also obeying an 80-column limit for the content and adding a missing "``virtualenv``" in the phrase "``pip`` and ``virtualenv``" in some of the docs. --- packages/google-cloud-datastore/README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 725671f35dc8..a36b4321a180 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -18,7 +18,9 @@ Quick Start $ pip install --upgrade google-cloud-datastore -Fore more information on setting up your Python development environment, such as installing ``pip`` and on your system, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform. +For more information on setting up your Python development environment, +such as installing ``pip`` and ``virtualenv`` on your system, please refer +to `Python Development Environment Setup Guide`_ for Google Cloud Platform. .. 
_Python Development Environment Setup Guide: https://cloud.google.com/python/setup From 5fb6b25a918a91cb1881e2e1e8d58c9eb37ded9e Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 1 Nov 2017 16:53:46 -0700 Subject: [PATCH 146/611] Closes #4319 - shorten test names (#4321) * Closes #4319 - shorten test names * #4319 update docs and config files --- packages/google-cloud-datastore/nox.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 8ba58e1f4ddd..584911cc7b2c 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -26,15 +26,15 @@ @nox.session -@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) -def unit_tests(session, python_version): +@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +def unit(session, py): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(python_version) + session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + python_version + session.virtualenv_dirname = 'unit-' + py # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) @@ -56,8 +56,8 @@ def unit_tests(session, python_version): @nox.session -@nox.parametrize('python_version', ['2.7', '3.6']) -def system_tests(session, python_version): +@nox.parametrize('py', ['2.7', '3.6']) +def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. @@ -65,10 +65,10 @@ def unit_tests(session, python_version): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python{}'.format(python_version) + session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. - session.virtualenv_dirname = 'sys-' + python_version + session.virtualenv_dirname = 'sys-' + py # Install all test dependencies, then install this package into the # virtualenv's dist-packages. From e007064da067250993979bd680611a98fecf1e02 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 1 Nov 2017 21:47:55 -0700 Subject: [PATCH 147/611] Making a `nox -s default` session for all packages. (#4324) * Making a `nox -s default` session for all packages. * Using "default" `nox` session on AppVeyor. This way, 32-bit or 64-bit Python can be used, depending on which is the active `python` / the active `nox.exe`. --- packages/google-cloud-datastore/nox.py | 30 ++++++++++++++++++-------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 584911cc7b2c..9faac3e7115a 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -26,16 +26,14 @@ @nox.session -@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) -def unit(session, py): - """Run the unit test suite.""" - - # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + py +def default(session): + """Default unit test session.
+ This is intended to be run **without** an interpreter set, so + that the current ``python`` (on the ``PATH``) or the version of + Python corresponding to the ``nox`` binary on the ``PATH`` can + run the tests. + """ # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -55,6 +53,20 @@ def unit(session, py): ) +@nox.session +@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +def unit(session, py): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python{}'.format(py) + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + py + + default(session) + + @nox.session @nox.parametrize('py', ['2.7', '3.6']) def system(session, py): From 58428924cf8c12637107b5acbd4ac3d31c4d1ade Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 8 Nov 2017 14:15:05 -0800 Subject: [PATCH 148/611] New Datastore auto-gen. (#4348) --- .../google/cloud/datastore/_gax.py | 10 +- .../google/cloud/datastore/_http.py | 2 +- .../google/cloud/datastore/batch.py | 2 +- .../google/cloud/datastore/client.py | 7 +- .../google/cloud/datastore/helpers.py | 2 +- .../google/cloud/datastore/key.py | 2 +- .../google/cloud/datastore/query.py | 6 +- .../google/cloud/datastore_v1/__init__.py | 30 + .../cloud/datastore_v1/gapic/__init__.py | 0 .../datastore_v1/gapic/datastore_client.py | 519 +++++ .../gapic/datastore_client_config.py | 58 + .../google/cloud/datastore_v1/gapic/enums.py | 150 ++ .../cloud/datastore_v1/proto/__init__.py | 0 .../cloud/datastore_v1/proto/datastore_pb2.py | 1745 +++++++++++++++++ .../datastore_v1/proto/datastore_pb2_grpc.py | 163 ++ .../cloud/datastore_v1/proto/entity_pb2.py | 661 +++++++ .../datastore_v1/proto/entity_pb2_grpc.py | 3 + .../cloud/datastore_v1/proto/query_pb2.py | 1145 +++++++++++ .../datastore_v1/proto/query_pb2_grpc.py | 3 + .../google/cloud/datastore_v1/types.py | 46 + packages/google-cloud-datastore/setup.py | 6 +- .../tests/unit/test__gax.py | 24 +- .../tests/unit/test__http.py | 56 +- .../tests/unit/test_batch.py | 22 +- .../tests/unit/test_client.py | 77 +- .../tests/unit/test_helpers.py | 64 +- .../tests/unit/test_key.py | 2 +- .../tests/unit/test_query.py | 30 +- .../tests/unit/test_transaction.py | 12 +- 29 files changed, 4703 insertions(+), 144 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2_grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py create mode 100644
packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py index 2d3e7459f6dd..4eb54eb7681d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py @@ -18,7 +18,8 @@ import contextlib import sys -from google.cloud.gapic.datastore.v1 import datastore_client +from google.api_core.gapic_v1 import client_info +from google.cloud.datastore_v1.gapic import datastore_client from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from google.gax.utils import metrics @@ -231,4 +232,9 @@ def make_datastore_api(client): channel = insecure_channel(host) return GAPICDatastoreAPI( - channel=channel, lib_name='gccl', lib_version=__version__) + channel=channel, + client_info=client_info.ClientInfo( + client_library_version=__version__, + gapic_version=__version__, + ), + ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 02bce52b730e..a161b9b096c0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -18,7 +18,7 @@ from google.cloud import _http as connection_module from google.cloud import exceptions -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 from google.cloud.datastore import __version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 5dd7a3146e7c..49be09964eb3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -22,7 +22,7 @@ """ from google.cloud.datastore import helpers -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 class Batch(object): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 4a4228a6b7f2..ec522cc5c1cc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -15,7 +15,7 @@ import os -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 from google.cloud._helpers import _LocalStack from google.cloud._helpers import ( @@ -135,7 +135,10 @@ def _extended_lookup(datastore_api, project, key_pbs, while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( - project, read_options, key_pbs) + project_id=project, + read_options=read_options, + keys=key_pbs, + ) # Accumulate the new results. 
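# (``lookup_response.found`` holds fully materialized entity results;
# any keys the backend reports in ``lookup_response.deferred`` are
# retried on the next pass through this loop, up to ``_MAX_LOOPS``.)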
results.extend(result.entity for result in lookup_response.found) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 056376965725..11e21aa46da0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -26,7 +26,7 @@ from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_datetime -from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index f2581b48714c..74d23e49265c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -18,7 +18,7 @@ import copy import six -from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 from google.cloud._helpers import _to_bytes from google.cloud.datastore import _app_engine_key_pb2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 477eccb04395..28febdd1d422 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -19,9 +19,9 @@ from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 -from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 -from google.cloud.proto.datastore.v1 import query_pb2 as _query_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 as _query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py new file mode 100644 index 000000000000..5157e60d4e80 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
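+# NOTE: this module stitches the generated pieces together: it re-exports
+# the GAPIC ``DatastoreClient``, ``enums``, and ``types`` under the
+# versioned ``google.cloud.datastore_v1`` namespace (see ``__all__`` below).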
+ +from __future__ import absolute_import + +from google.cloud.datastore_v1 import types +from google.cloud.datastore_v1.gapic import datastore_client +from google.cloud.datastore_v1.gapic import enums + + +class DatastoreClient(datastore_client.DatastoreClient): + __doc__ = datastore_client.DatastoreClient.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'DatastoreClient', ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py new file mode 100644 index 000000000000..983e30dce902 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -0,0 +1,519 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/datastore/v1/datastore.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.datastore.v1 Datastore API.""" + +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.protobuf_helpers + +from google.cloud.datastore_v1.gapic import datastore_client_config +from google.cloud.datastore_v1.gapic import enums +from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-datastore', ).version + + +class DatastoreClient(object): + """ + Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + """ + + SERVICE_ADDRESS = 'datastore.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', ) + + # The name of the interface for this client. 
This is the key used to find + # method configuration in the client_config dictionary + _INTERFACE_NAME = ('google.datastore.v1.Datastore') + + def __init__(self, + channel=None, + credentials=None, + client_config=datastore_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. If specified, then the ``credentials`` + argument is ignored. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): + A dictionary of call options for each method. If not specified + the default configuration is used. Generally, you only need + to set this if you're developing your own client library. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + if channel is not None and credentials is not None: + raise ValueError( + 'channel and credentials arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__)) + + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES) + + self.datastore_stub = (datastore_pb2.DatastoreStub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + interface_config = client_config['interfaces'][self._INTERFACE_NAME] + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + interface_config) + + self._lookup = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.Lookup, + default_retry=method_configs['Lookup'].retry, + default_timeout=method_configs['Lookup'].timeout, + client_info=client_info) + self._run_query = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.RunQuery, + default_retry=method_configs['RunQuery'].retry, + default_timeout=method_configs['RunQuery'].timeout, + client_info=client_info) + self._begin_transaction = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.BeginTransaction, + default_retry=method_configs['BeginTransaction'].retry, + default_timeout=method_configs['BeginTransaction'].timeout, + client_info=client_info) + self._commit = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.Commit, + default_retry=method_configs['Commit'].retry, + default_timeout=method_configs['Commit'].timeout, + client_info=client_info) + self._rollback = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.Rollback, + default_retry=method_configs['Rollback'].retry, + default_timeout=method_configs['Rollback'].timeout, + client_info=client_info) + self._allocate_ids = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.AllocateIds, + default_retry=method_configs['AllocateIds'].retry, + default_timeout=method_configs['AllocateIds'].timeout, + client_info=client_info) + self._reserve_ids = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.ReserveIds, + default_retry=method_configs['ReserveIds'].retry, + default_timeout=method_configs['ReserveIds'].timeout, + 
client_info=client_info) + + # Service calls + def lookup(self, + project_id, + keys, + read_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Looks up entities by key. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> keys = [] + >>> + >>> response = client.lookup(project_id, keys) + + Args: + project_id (str): The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Keys of entities to look up. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this lookup request. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.ReadOptions` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.LookupResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.LookupRequest( + project_id=project_id, keys=keys, read_options=read_options) + return self._lookup(request, retry=retry, timeout=timeout) + + def run_query(self, + project_id, + partition_id, + read_options=None, + query=None, + gql_query=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Queries for entities. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> partition_id = {} + >>> + >>> response = client.run_query(project_id, partition_id) + + Args: + project_id (str): The ID of the project against which to make the request. + partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into subsets, identified by a partition ID. + Queries are scoped to a single partition. + This partition ID is normalized with the standard default context + partition ID. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.PartitionId` + read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this query. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.ReadOptions` + query (Union[dict, ~google.cloud.datastore_v1.types.Query]): The query to run. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Query` + gql_query (Union[dict, ~google.cloud.datastore_v1.types.GqlQuery]): The GQL query to run. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.GqlQuery` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.RunQueryResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof( + query=query, + gql_query=gql_query, ) + + request = datastore_pb2.RunQueryRequest( + project_id=project_id, + partition_id=partition_id, + read_options=read_options, + query=query, + gql_query=gql_query) + return self._run_query(request, retry=retry, timeout=timeout) + + def begin_transaction(self, + project_id, + transaction_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Begins a new transaction. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> + >>> response = client.begin_transaction(project_id) + + Args: + project_id (str): The ID of the project against which to make the request. + transaction_options (Union[dict, ~google.cloud.datastore_v1.types.TransactionOptions]): Options for a new transaction. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.TransactionOptions` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.BeginTransactionResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.BeginTransactionRequest( + project_id=project_id, transaction_options=transaction_options) + return self._begin_transaction(request, retry=retry, timeout=timeout) + + def commit(self, + project_id, + mode, + mutations, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Commits a transaction, optionally creating, deleting or modifying some + entities. 
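+
+ (When ``mode`` is ``TRANSACTIONAL``, the mutations are applied
+ atomically: either all of them take effect or none do; see
+ ``CommitRequest.Mode`` in ``enums``.)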
+ + Example: + >>> from google.cloud import datastore_v1 + >>> from google.cloud.datastore_v1 import enums + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED + >>> mutations = [] + >>> + >>> response = client.commit(project_id, mode, mutations) + + Args: + project_id (str): The ID of the project against which to make the request. + mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. + mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform. + + When mode is ``TRANSACTIONAL``, mutations affecting a single entity are + applied in order. The following sequences of mutations affecting a single + entity are not permitted in a single ``Commit`` request: + + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` + + When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single + entity. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Mutation` + transaction (bytes): The identifier of the transaction associated with the commit. A + transaction identifier is returned by a call to + ``Datastore.BeginTransaction``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.CommitResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) + + request = datastore_pb2.CommitRequest( + project_id=project_id, + mode=mode, + mutations=mutations, + transaction=transaction) + return self._commit(request, retry=retry, timeout=timeout) + + def rollback(self, + project_id, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Rolls back a transaction. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> transaction = b'' + >>> + >>> response = client.rollback(project_id, transaction) + + Args: + project_id (str): The ID of the project against which to make the request. + transaction (bytes): The transaction identifier, returned by a call to + ``Datastore.BeginTransaction``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.RollbackResponse` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.RollbackRequest( + project_id=project_id, transaction=transaction) + return self._rollback(request, retry=retry, timeout=timeout) + + def allocate_ids(self, + project_id, + keys, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> keys = [] + >>> + >>> response = client.allocate_ids(project_id, keys) + + Args: + project_id (str): The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with incomplete key paths for which to allocate IDs. + No key may be reserved/read-only. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.AllocateIdsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.AllocateIdsRequest( + project_id=project_id, keys=keys) + return self._allocate_ids(request, retry=retry, timeout=timeout) + + def reserve_ids(self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> keys = [] + >>> + >>> response = client.reserve_ids(project_id, keys) + + Args: + project_id (str): The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with complete key paths whose numeric IDs should not be + auto-allocated. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + database_id (str): If not empty, the ID of the database against which to make the request. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.ReserveIdsRequest( + project_id=project_id, keys=keys, database_id=database_id) + return self._reserve_ids(request, retry=retry, timeout=timeout) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py new file mode 100644 index 000000000000..1a3eb9523447 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -0,0 +1,58 @@ +config = { + "interfaces": { + "google.datastore.v1.Datastore": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "Lookup": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "RunQuery": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "BeginTransaction": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Commit": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Rollback": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "AllocateIds": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ReserveIds": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py new file mode 100644 index 000000000000..31dc31d8e92f --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -0,0 +1,150 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class NullValue(object): + """ + ``NullValue`` is a singleton enumeration to represent the null value for the + ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + NULL_VALUE = 0 + + +class EntityResult(object): + class ResultType(object): + """ + Specifies what data the 'entity' field contains. 
+ A ``ResultType`` is either implied (for example, in ``LookupResponse.missing`` + from ``datastore.proto``, it is always ``KEY_ONLY``) or specified by context + (for example, in message ``QueryResultBatch``, field ``entity_result_type`` + specifies a ``ResultType`` for all the values in field ``entity_results``). + + Attributes: + RESULT_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. + FULL (int): The key and properties. + PROJECTION (int): A projected subset of properties. The entity may have no key. + KEY_ONLY (int): Only the key. + """ + RESULT_TYPE_UNSPECIFIED = 0 + FULL = 1 + PROJECTION = 2 + KEY_ONLY = 3 + + +class PropertyOrder(object): + class Direction(object): + """ + The sort direction. + + Attributes: + DIRECTION_UNSPECIFIED (int): Unspecified. This value must not be used. + ASCENDING (int): Ascending. + DESCENDING (int): Descending. + """ + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + +class CompositeFilter(object): + class Operator(object): + """ + A composite filter operator. + + Attributes: + OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. + AND (int): The results are required to satisfy each of the combined filters. + """ + OPERATOR_UNSPECIFIED = 0 + AND = 1 + + +class PropertyFilter(object): + class Operator(object): + """ + A property filter operator. + + Attributes: + OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. + LESS_THAN (int): Less than. + LESS_THAN_OR_EQUAL (int): Less than or equal. + GREATER_THAN (int): Greater than. + GREATER_THAN_OR_EQUAL (int): Greater than or equal. + EQUAL (int): Equal. + HAS_ANCESTOR (int): Has ancestor. + """ + OPERATOR_UNSPECIFIED = 0 + LESS_THAN = 1 + LESS_THAN_OR_EQUAL = 2 + GREATER_THAN = 3 + GREATER_THAN_OR_EQUAL = 4 + EQUAL = 5 + HAS_ANCESTOR = 11 + + +class QueryResultBatch(object): + class MoreResultsType(object): + """ + The possible values for the ``more_results`` field. + + Attributes: + MORE_RESULTS_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. + NOT_FINISHED (int): There may be additional batches to fetch from this query. + MORE_RESULTS_AFTER_LIMIT (int): The query is finished, but there may be more results after the limit. + MORE_RESULTS_AFTER_CURSOR (int): The query is finished, but there may be more results after the end + cursor. + NO_MORE_RESULTS (int): The query is finished, and there are no more results. + """ + MORE_RESULTS_TYPE_UNSPECIFIED = 0 + NOT_FINISHED = 1 + MORE_RESULTS_AFTER_LIMIT = 2 + MORE_RESULTS_AFTER_CURSOR = 4 + NO_MORE_RESULTS = 3 + + +class CommitRequest(object): + class Mode(object): + """ + The modes available for commits. + + Attributes: + MODE_UNSPECIFIED (int): Unspecified. This value must not be used. + TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are applied. + Learn about transactions `here `_. + NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. + """ + MODE_UNSPECIFIED = 0 + TRANSACTIONAL = 1 + NON_TRANSACTIONAL = 2 + + +class ReadOptions(object): + class ReadConsistency(object): + """ + The possible values for read consistencies. + + Attributes: + READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. + STRONG (int): Strong consistency. + EVENTUAL (int): Eventual consistency. 
+ """ + READ_CONSISTENCY_UNSPECIFIED = 0 + STRONG = 1 + EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py new file mode 100644 index 000000000000..688ff0bcfa4d --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -0,0 +1,1745 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/datastore_v1/proto/datastore.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2 +from google.cloud.datastore_v1.proto import query_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_query__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/datastore_v1/proto/datastore.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n/google/cloud/datastore_v1/proto/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/datastore_v1/proto/entity.proto\x1a+google/cloud/datastore_v1/proto/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"s\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x44\n\x13transaction_options\x18\n \x01(\x0b\x32\'.google.datastore.v1.TransactionOptions\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 
\x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"d\n\x11ReserveIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0b\x64\x61tabase_id\x18\t \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x14\n\x12ReserveIdsResponse\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type\"\xe3\x01\n\x12TransactionOptions\x12G\n\nread_write\x18\x01 \x01(\x0b\x32\x31.google.datastore.v1.TransactionOptions.ReadWriteH\x00\x12\x45\n\tread_only\x18\x02 \x01(\x0b\x32\x30.google.datastore.v1.TransactionOptions.ReadOnlyH\x00\x1a)\n\tReadWrite\x12\x1c\n\x14previous_transaction\x18\x01 \x01(\x0c\x1a\n\n\x08ReadOnlyB\x06\n\x04mode2\xec\x07\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*\x12\x8e\x01\n\nReserveIds\x12&.google.datastore.v1.ReserveIdsRequest\x1a\'.google.datastore.v1.ReserveIdsResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/projects/{project_id}:reserveIds:\x01*B\x85\x01\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01Z=0.15.0.""" + """Each RPC normalizes the partition 
IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + def Lookup(self, request, context): + """Looks up entities by key. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def RunQuery(self, request, context): + """Queries for entities. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def BeginTransaction(self, request, context): + """Begins a new transaction. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Commit(self, request, context): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Rollback(self, request, context): + """Rolls back a transaction. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AllocateIds(self, request, context): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ReserveIds(self, request, context): + """Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaDatastoreStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + def Lookup(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Looks up entities by key. + """ + raise NotImplementedError() + Lookup.future = None + def RunQuery(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Queries for entities. + """ + raise NotImplementedError() + RunQuery.future = None + def BeginTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Begins a new transaction. + """ + raise NotImplementedError() + BeginTransaction.future = None + def Commit(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + raise NotImplementedError() + Commit.future = None + def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Rolls back a transaction. + """ + raise NotImplementedError() + Rollback.future = None + def AllocateIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. 
+ """ + raise NotImplementedError() + AllocateIds.future = None + def ReserveIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + """ + raise NotImplementedError() + ReserveIds.future = None + + + def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.FromString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.FromString, + ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.FromString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.FromString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsRequest.FromString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.FromString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.FromString, + } + response_serializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.SerializeToString, + } + method_implementations = { + ('google.datastore.v1.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), + ('google.datastore.v1.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), + ('google.datastore.v1.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), + ('google.datastore.v1.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), + ('google.datastore.v1.Datastore', 'ReserveIds'): face_utilities.unary_unary_inline(servicer.ReserveIds), + ('google.datastore.v1.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), + ('google.datastore.v1.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.SerializeToString, + } + response_deserializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.FromString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.FromString, + ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.FromString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.FromString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsResponse.FromString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.FromString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.FromString, + } + cardinalities = { + 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, + 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, + 'Commit': cardinality.Cardinality.UNARY_UNARY, + 'Lookup': cardinality.Cardinality.UNARY_UNARY, + 'ReserveIds': cardinality.Cardinality.UNARY_UNARY, + 'Rollback': cardinality.Cardinality.UNARY_UNARY, + 'RunQuery': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.datastore.v1.Datastore', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py new file mode 100644 index 000000000000..5209ca6e146d --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py @@ -0,0 +1,163 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.datastore_v1.proto import datastore_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2 + + +class DatastoreStub(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Lookup = channel.unary_unary( + '/google.datastore.v1.Datastore/Lookup', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupResponse.FromString, + ) + self.RunQuery = channel.unary_unary( + '/google.datastore.v1.Datastore/RunQuery', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryResponse.FromString, + ) + self.BeginTransaction = channel.unary_unary( + '/google.datastore.v1.Datastore/BeginTransaction', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionResponse.FromString, + ) + self.Commit = channel.unary_unary( + '/google.datastore.v1.Datastore/Commit', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitResponse.FromString, + ) + self.Rollback = channel.unary_unary( + '/google.datastore.v1.Datastore/Rollback', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackResponse.FromString, + ) + self.AllocateIds = channel.unary_unary( + '/google.datastore.v1.Datastore/AllocateIds', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsResponse.FromString, + ) + self.ReserveIds = channel.unary_unary( + '/google.datastore.v1.Datastore/ReserveIds', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsResponse.FromString, + ) + + +class DatastoreServicer(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + + def Lookup(self, request, context): + """Looks up entities by key. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RunQuery(self, request, context): + """Queries for entities. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BeginTransaction(self, request, context): + """Begins a new transaction. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Commit(self, request, context): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Rollback(self, request, context): + """Rolls back a transaction. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AllocateIds(self, request, context): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ReserveIds(self, request, context): + """Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DatastoreServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Lookup': grpc.unary_unary_rpc_method_handler( + servicer.Lookup, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupResponse.SerializeToString, + ), + 'RunQuery': grpc.unary_unary_rpc_method_handler( + servicer.RunQuery, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryResponse.SerializeToString, + ), + 'BeginTransaction': grpc.unary_unary_rpc_method_handler( + servicer.BeginTransaction, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionResponse.SerializeToString, + ), + 'Commit': grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitResponse.SerializeToString, + ), + 'Rollback': grpc.unary_unary_rpc_method_handler( + servicer.Rollback, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackResponse.SerializeToString, + ), + 'AllocateIds': grpc.unary_unary_rpc_method_handler( + servicer.AllocateIds, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsResponse.SerializeToString, + ), + 'ReserveIds': grpc.unary_unary_rpc_method_handler( + servicer.ReserveIds, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsRequest.FromString, + 
response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.datastore.v1.Datastore', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py new file mode 100644 index 000000000000..9d864d2519d9 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py @@ -0,0 +1,661 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/datastore_v1/proto/entity.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/datastore_v1/proto/entity.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n,google/cloud/datastore_v1/proto/entity.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xb7\x01\n\x03Key\x12\x36\n\x0cpartition_id\x18\x01 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x32\n\x04path\x18\x02 \x03(\x0b\x32$.google.datastore.v1.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"8\n\nArrayValue\x12*\n\x06values\x18\x01 \x03(\x0b\x32\x1a.google.datastore.v1.Value\"\xf1\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\tkey_value\x18\x05 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x33\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12\x36\n\x0b\x61rray_value\x18\t \x01(\x0b\x32\x1f.google.datastore.v1.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xbf\x01\n\x06\x45ntity\x12%\n\x03key\x18\x01 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12?\n\nproperties\x18\x03 \x03(\x0b\x32+.google.datastore.v1.Entity.PropertiesEntry\x1aM\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 
\x01(\x0b\x32\x1a.google.datastore.v1.Value:\x02\x38\x01\x42\x82\x01\n\x17\x63om.google.datastore.v1B\x0b\x45ntityProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x81\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z`__. + end_cursor: + An ending point for the query results. Query cursors are + returned in query result batches and `can only be used to + limit the same query `__. + offset: + The number of results to skip. Applies before limit, but after + all other constraints. Optional. Must be >= 0 if specified. + limit: + The maximum number of results to return. Applies after all + other constraints. Optional. Unspecified is interpreted as no + limit. Must be >= 0 if specified. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) + )) +_sym_db.RegisterMessage(Query) + +KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( + DESCRIPTOR = _KINDEXPRESSION, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A representation of a kind. + + + Attributes: + name: + The name of the kind. 
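+        For example, ``KindExpression(name='Task')`` restricts a query
+        to entities of kind ``Task``.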
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) + )) +_sym_db.RegisterMessage(KindExpression) + +PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYREFERENCE, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A reference to a property relative to the kind expressions. + + + Attributes: + name: + The name of the property. If name includes "."s, it may be + interpreted as a property name path. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) + )) +_sym_db.RegisterMessage(PropertyReference) + +Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( + DESCRIPTOR = _PROJECTION, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A representation of a property in a projection. + + + Attributes: + property: + The property to project. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) + )) +_sym_db.RegisterMessage(Projection) + +PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYORDER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """The desired order for a specific property. + + + Attributes: + property: + The property to order by. + direction: + The direction to order by. Defaults to ``ASCENDING``. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) + )) +_sym_db.RegisterMessage(PropertyOrder) + +Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( + DESCRIPTOR = _FILTER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A holder for any type of filter. + + + Attributes: + filter_type: + The type of filter. + composite_filter: + A composite filter. + property_filter: + A filter on a property. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) + )) +_sym_db.RegisterMessage(Filter) + +CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( + DESCRIPTOR = _COMPOSITEFILTER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A filter that merges multiple other filters using the given operator. + + + Attributes: + op: + The operator for combining multiple filters. + filters: + The list of filters to combine. Must contain at least one + filter. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) + )) +_sym_db.RegisterMessage(CompositeFilter) + +PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYFILTER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A filter on a specific property. + + + Attributes: + property: + The property to filter by. + op: + The operator to filter by. + value: + The value to compare the property to. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) + )) +_sym_db.RegisterMessage(PropertyFilter) + +GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( + + NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) + )) + , + DESCRIPTOR = _GQLQUERY, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A `GQL + query `__. + + + Attributes: + query_string: + A string of the format described `here `__. + allow_literals: + When false, the query string must not contain any literals and + instead must bind all values. For example, ``SELECT * FROM + Kind WHERE a = 'string literal'`` is not allowed, while + ``SELECT * FROM Kind WHERE a = @value`` is. + named_bindings: + For each non-reserved named binding site in the query string, + there must be a named parameter with that name, but not + necessarily the inverse. Key must match regex ``[A-Za- + z_$][A-Za-z_$0-9]*``, must not match regex ``__.*__``, and + must not be ``""``. + positional_bindings: + Numbered binding site @1 references the first numbered + parameter, effectively using 1-based indexing, rather than the + usual 0. For each binding site numbered i in + ``query_string``, there must be an i-th numbered parameter. + The inverse must also be true. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) + )) +_sym_db.RegisterMessage(GqlQuery) +_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) + +GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERYPARAMETER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A binding parameter for a GQL query. + + + Attributes: + parameter_type: + The type of parameter. + value: + A value parameter. + cursor: + A query cursor. Query cursors are returned in query result + batches. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) + )) +_sym_db.RegisterMessage(GqlQueryParameter) + +QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( + DESCRIPTOR = _QUERYRESULTBATCH, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A batch of results produced by a query. + + + Attributes: + skipped_results: + The number of results skipped, typically because of an offset. + skipped_cursor: + A cursor that points to the position after the last skipped + result. Will be set when ``skipped_results`` != 0. + entity_result_type: + The result type for every entity in ``entity_results``. + entity_results: + The results for this batch. + end_cursor: + A cursor that points to the position after the last result in + the batch. + more_results: + The state of the query after the current batch. + snapshot_version: + The version number of the snapshot this batch was returned + from. This applies to the range of results from the query's + ``start_cursor`` (or the beginning of the query if no cursor + was given) to this batch's ``end_cursor`` (not the query's + ``end_cursor``). In a single transaction, subsequent query + result batches for the same query can have a greater snapshot + version number. 
Each batch's snapshot version is valid for all + preceding batches. The value will be zero for eventually + consistent queries. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) + )) +_sym_db.RegisterMessage(QueryResultBatch) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\nQueryProtoP\001Z= 0.28.0, < 0.29dev', 'google-api-core >= 0.1.1, < 0.2.0dev', - 'google-gax >= 0.15.7, < 0.16dev', - 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', + 'google-auth >= 1.0.2, < 2.0dev', + 'google-gax >= 0.15.15, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'requests >= 2.18.4, < 3.0dev', ] setup( diff --git a/packages/google-cloud-datastore/tests/unit/test__gax.py b/packages/google-cloud-datastore/tests/unit/test__gax.py index f81d709c6b65..9f0896058fe3 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gax.py +++ b/packages/google-cloud-datastore/tests/unit/test__gax.py @@ -95,7 +95,7 @@ def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def test_lookup(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -115,7 +115,7 @@ def test_lookup(self): mock_catch_rendezvous.assert_called_once_with() def test_run_query(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -136,7 +136,7 @@ def test_run_query(self): mock_catch_rendezvous.assert_called_once_with() def test_begin_transaction(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -158,7 +158,7 @@ def test_begin_transaction(self): mock_catch_rendezvous.assert_called_once_with() def test_commit(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -178,7 +178,7 @@ def test_commit(self): mock_catch_rendezvous.assert_called_once_with() def test_rollback(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -199,7 +199,7 @@ def test_rollback(self): mock_catch_rendezvous.assert_called_once_with() def test_allocate_ids(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -236,7 +236,7 @@ def _call_fut(self, client): @mock.patch('google.cloud.datastore._gax.make_secure_channel', return_value=mock.sentinel.channel) def test_live_api(self, make_chan, mock_klass): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.datastore import __version__ @@ -252,8 +252,9 @@ def test_live_api(self, make_chan, mock_klass): make_chan.assert_called_once_with( mock.sentinel.credentials, DEFAULT_USER_AGENT, 
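            # make_secure_channel(credentials, user_agent, host) is how
            # the gRPC channel for the live API is built.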
host) mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, lib_name='gccl', - lib_version=__version__) + channel=mock.sentinel.channel, + client_info=mock.ANY, + ) @mock.patch( 'google.cloud.datastore._gax.GAPICDatastoreAPI', @@ -274,5 +275,6 @@ def test_emulator(self, make_chan, mock_klass): make_chan.assert_called_once_with(host) mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, lib_name='gccl', - lib_version=__version__) + channel=mock.sentinel.channel, + client_info=mock.ANY, + ) diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index e840b649cdd1..cceb40419a56 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -91,7 +91,7 @@ def _call_fut(*args, **kwargs): return _rpc(*args, **kwargs) def test_it(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 http = object() project = 'projectOK' @@ -128,7 +128,7 @@ def _make_one(self, *args, **kwargs): @staticmethod def _make_query_pb(kind): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 return query_pb2.Query( kind=[query_pb2.KindExpression(name=kind)], @@ -140,7 +140,7 @@ def test_constructor(self): self.assertIs(ds_api.client, client) def test_lookup_single_key_empty_response(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb = _make_key_pb(project) @@ -170,7 +170,7 @@ def test_lookup_single_key_empty_response(self): self.assertEqual(request.read_options, read_options) def test_lookup_single_key_empty_response_w_eventual(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb = _make_key_pb(project) @@ -201,7 +201,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): self.assertEqual(request.read_options, read_options) def test_lookup_single_key_empty_response_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' transaction = b'TRANSACTION' @@ -232,8 +232,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): self.assertEqual(request.read_options, read_options) def test_lookup_single_key_nonempty_response(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 project = 'PROJECT' key_pb = _make_key_pb(project) @@ -269,7 +269,7 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_empty_response(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -300,7 +300,7 @@ def test_lookup_multiple_keys_empty_response(self): self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_missing(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -336,7 
+336,7 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_deferred(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -369,9 +369,9 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(request.read_options, read_options) def test_run_query_w_eventual_no_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Nonesuch' @@ -410,9 +410,9 @@ def test_run_query_w_eventual_no_transaction(self): self.assertEqual(request.read_options, read_options) def test_run_query_wo_eventual_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Nonesuch' @@ -451,9 +451,9 @@ def test_run_query_wo_eventual_w_transaction(self): self.assertEqual(request.read_options, read_options) def test_run_query_wo_namespace_empty_result(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Nonesuch' @@ -491,9 +491,9 @@ def test_run_query_wo_namespace_empty_result(self): self.assertEqual(request.read_options, read_options) def test_run_query_w_namespace_nonempty_result(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Kind' @@ -533,7 +533,7 @@ def test_run_query_w_namespace_nonempty_result(self): self.assertEqual(request.query, query_pb) def test_begin_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' transaction = b'TRANSACTION' @@ -561,7 +561,7 @@ def test_begin_transaction(self): self.assertEqual(request.project_id, u'') def test_commit_wo_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb project = 'PROJECT' @@ -596,7 +596,7 @@ def test_commit_wo_transaction(self): self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) def test_commit_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb 
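        # Committing inside a transaction must carry the transaction id
        # and use mode=TRANSACTIONAL (asserted below).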
project = 'PROJECT' @@ -631,7 +631,7 @@ def test_commit_w_transaction(self): self.assertEqual(request.mode, rq_class.TRANSACTIONAL) def test_rollback_ok(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' transaction = b'xact' @@ -656,7 +656,7 @@ def test_rollback_ok(self): self.assertEqual(request.transaction, transaction) def test_allocate_ids_empty(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() @@ -681,7 +681,7 @@ def test_allocate_ids_empty(self): self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' before_key_pbs = [ diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index 01262ae17ba6..9b854141e553 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -43,7 +43,7 @@ def test_ctor(self): self.assertEqual(batch._partial_key_entities, []) def test_current(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' client = _Client(project) @@ -219,7 +219,7 @@ def test_rollback_wrong_status(self): self.assertRaises(ValueError, batch.rollback) def test_commit(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' client = _Client(project) @@ -244,7 +244,7 @@ def test_commit_wrong_status(self): self.assertRaises(ValueError, batch.commit) def test_commit_w_partial_key_entities(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' new_id = 1234 @@ -269,7 +269,7 @@ def test_commit_w_partial_key_entities(self): self.assertEqual(entity.key._id, new_id) def test_as_context_mgr_wo_error(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' properties = {'foo': 'bar'} @@ -293,7 +293,7 @@ def test_as_context_mgr_wo_error(self): project, mode, batch.mutations, transaction=None) def test_as_context_mgr_nested(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' properties = {'foo': 'bar'} @@ -381,8 +381,8 @@ def _call_fut(self, commit_response_pb): return _parse_commit_response(commit_response_pb) def test_it(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 index_updates = 1337 keys = [ @@ -434,7 +434,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out @@ -494,8 +494,8 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type): def _make_mutation(id_): - from 
google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = 'PROJECT' @@ -506,7 +506,7 @@ def _make_mutation(id_): def _make_commit_response(*new_key_ids): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 mutation_results = [ _make_mutation(key_id) for key_id in new_key_ids] diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 6477f53c5fa7..51b3e2651531 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -24,7 +24,7 @@ def _make_credentials(): def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -338,7 +338,7 @@ def test_get_multi_no_keys(self): self.assertEqual(results, []) def test_get_multi_miss(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key creds = _make_credentials() @@ -352,11 +352,14 @@ def test_get_multi_miss(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key.to_protobuf()]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key.to_protobuf()], + ) def test_get_multi_miss_w_missing(self): - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key KIND = 'Kind' @@ -386,7 +389,10 @@ def test_get_multi_miss_w_missing(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key_pb], + ) def test_get_multi_w_missing_non_empty(self): from google.cloud.datastore.key import Key @@ -411,7 +417,7 @@ def test_get_multi_w_deferred_non_empty(self): [key], deferred=deferred) def test_get_multi_miss_w_deferred(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key key = Key('Kind', 1234, project=self.PROJECT) @@ -432,11 +438,14 @@ def test_get_multi_miss_w_deferred(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key_pb], + ) def test_get_multi_w_deferred_from_backend_but_not_passed(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -479,12 +488,18 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): 
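        # The backend defers one key, so the client retries: two lookup
        # calls in total.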
self.assertEqual(ds_api.lookup.call_count, 2) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_any_call( - self.PROJECT, read_options, [key2_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key2_pb], + ) ds_api.lookup.assert_any_call( - self.PROJECT, read_options, [key1_pb, key2_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key1_pb, key2_pb], + ) def test_get_multi_hit(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key kind = 'Kind' @@ -514,10 +529,13 @@ def test_get_multi_hit(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key.to_protobuf()]) + keys=[key.to_protobuf()], + project_id=self.PROJECT, + read_options=read_options, + ) def test_get_multi_hit_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key txn_id = b'123' @@ -550,10 +568,13 @@ def test_get_multi_hit_w_transaction(self): read_options = datastore_pb2.ReadOptions(transaction=txn_id) ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key.to_protobuf()]) + project_id=self.PROJECT, + keys=[key.to_protobuf()], + read_options=read_options, + ) def test_get_multi_hit_multiple_keys_same_project(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key kind = 'Kind' @@ -584,8 +605,10 @@ def test_get_multi_hit_multiple_keys_same_project(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, - [key1.to_protobuf(), key2.to_protobuf()]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key1.to_protobuf(), key2.to_protobuf()], + ) def test_get_multi_hit_multiple_keys_different_project(self): from google.cloud.datastore.key import Key @@ -668,7 +691,7 @@ def test_put_multi_w_single_empty_entity(self): self.assertRaises(ValueError, client.put_multi, Entity()) def test_put_multi_no_batch_w_partial_key(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _property_tuples entity = _Entity(foo=u'bar') @@ -750,7 +773,7 @@ def test_delete_multi_no_keys(self): client._datastore_api_internal.commit.assert_not_called() def test_delete_multi_no_batch(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 key = _Key(self.PROJECT) @@ -1022,7 +1045,7 @@ def test_eventual_w_transaction(self): self._call_fut(True, b'123') def test_eventual_wo_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 read_options = self._call_fut(True, None) expected = datastore_pb2.ReadOptions( @@ -1030,7 +1053,7 @@ def test_eventual_wo_transaction(self): self.assertEqual(read_options, expected) def test_default_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 txn_id = b'123abc-easy-as' read_options = self._call_fut(False, txn_id) @@ -1038,7 +1061,7 @@ def test_default_w_transaction(self): self.assertEqual(read_options, expected) def 
test_default_wo_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 read_options = self._call_fut(False, None) expected = datastore_pb2.ReadOptions() @@ -1103,7 +1126,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out @@ -1153,7 +1176,7 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type): def _make_key(id_): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() elem = key.path.add() @@ -1162,7 +1185,7 @@ def _make_key(id_): def _make_commit_response(*keys): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 mutation_results = [ datastore_pb2.MutationResult(key=key) for key in keys] diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index be4855d5e48c..5e91de0196f4 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -23,7 +23,7 @@ def _call_fut(self, entity_pb, name): return _new_value_pb(entity_pb, name) def test_it(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 entity_pb = entity_pb2.Entity() name = 'foo' @@ -43,7 +43,7 @@ def _call_fut(self, entity_pb): def test_it(self): import types - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -66,7 +66,7 @@ def _call_fut(self, val): return entity_from_protobuf(val) def test_it(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -112,7 +112,7 @@ def test_it(self): self.assertEqual(key.id, _ID) def test_mismatched_value_indexed(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -136,7 +136,7 @@ def test_mismatched_value_indexed(self): self._call_fut(entity_pb) def test_entity_no_key(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 entity_pb = entity_pb2.Entity() entity = self._call_fut(entity_pb) @@ -145,7 +145,7 @@ def test_entity_no_key(self): self.assertEqual(dict(entity), {}) def test_entity_with_meaning(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -160,7 +160,7 @@ def test_entity_with_meaning(self): self.assertEqual(entity._meanings, {name: (meaning, val)}) def test_nested_entity_no_key(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'FOO' @@ -218,7 +218,7 @@ def _compare_entity_proto(self, entity_pb1, entity_pb2): 
self.assertEqual(val1, val2) def test_empty(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -226,7 +226,7 @@ def test_empty(self): self._compare_entity_proto(entity_pb, entity_pb2.Entity()) def test_key_only(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -245,7 +245,7 @@ def test_key_only(self): self._compare_entity_proto(entity_pb, expected_pb) def test_simple_fields(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -265,7 +265,7 @@ def test_simple_fields(self): self._compare_entity_proto(entity_pb, expected_pb) def test_with_empty_list(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -275,7 +275,7 @@ def test_with_empty_list(self): self._compare_entity_proto(entity_pb, entity_pb2.Entity()) def test_inverts_to_protobuf(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb from google.cloud.datastore.helpers import entity_from_protobuf @@ -328,7 +328,7 @@ def test_inverts_to_protobuf(self): self._compare_entity_proto(original_pb, new_pb) def test_meaning_with_change(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -346,7 +346,7 @@ def test_meaning_with_change(self): self._compare_entity_proto(entity_pb, expected_pb) def test_variable_meanings(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -372,7 +372,7 @@ def test_variable_meanings(self): self._compare_entity_proto(entity_pb, expected_pb) def test_dict_to_entity(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -395,7 +395,7 @@ def test_dict_to_entity(self): self.assertEqual(entity_pb, expected_pb) def test_dict_to_entity_recursive(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -451,7 +451,7 @@ def _call_fut(self, val): return key_from_protobuf(val) def _makePB(self, project=None, namespace=None, path=()): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Key() if project is not None: @@ -636,7 +636,7 @@ def _call_fut(self, pb): return _get_value_from_value_pb(pb) def _makePB(self, attr_name, value): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value() setattr(pb, attr_name, value) @@ -646,7 +646,7 
@@ def test_datetime(self): import calendar import datetime from google.cloud._helpers import UTC - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) @@ -656,7 +656,7 @@ def test_datetime(self): self.assertEqual(self._call_fut(pb), utc) def test_key(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.key import Key pb = entity_pb2.Value() @@ -686,7 +686,7 @@ def test_unicode(self): self.assertEqual(self._call_fut(pb), u'str') def test_entity(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -702,7 +702,7 @@ def test_entity(self): self.assertEqual(entity['foo'], 'Foo') def test_array(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value() array_pb = pb.array_value.values @@ -715,7 +715,7 @@ def test_array(self): def test_geo_point(self): from google.type import latlng_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import GeoPoint lat = -3.14 @@ -729,14 +729,14 @@ def test_geo_point(self): def test_null(self): from google.protobuf import struct_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) result = self._call_fut(pb) self.assertIsNone(result) def test_unknown(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value() with self.assertRaises(ValueError): @@ -751,7 +751,7 @@ def _call_fut(self, value_pb, val): return _set_protobuf_value(value_pb, val) def _makePB(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 return entity_pb2.Value() def test_datetime(self): @@ -891,14 +891,14 @@ def _call_fut(self, *args, **kwargs): return _get_meaning(*args, **kwargs) def test_no_meaning(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() result = self._call_fut(value_pb) self.assertIsNone(result) def test_single(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 @@ -907,7 +907,7 @@ def test_single(self): self.assertEqual(meaning, result) def test_empty_array_value(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() value_pb.array_value.values.add() @@ -917,7 +917,7 @@ def test_empty_array_value(self): self.assertEqual(None, result) def test_array_value(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() meaning = 9 @@ -932,7 +932,7 @@ def test_array_value(self): self.assertEqual(meaning, result) def test_array_value_multiple_meanings(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from 
google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() meaning1 = 9 @@ -949,7 +949,7 @@ def test_array_value_multiple_meanings(self): self.assertEqual(result, [meaning1, meaning2]) def test_array_value_meaning_partially_unset(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() meaning1 = 9 diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index 3f82412e27cd..e95d756013cc 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -332,7 +332,7 @@ def test_completed_key_on_complete(self): self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 _KIND = 'KIND' key = self._make_one(_KIND, project=self._DEFAULT_PROJECT) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 111a2ceed0bf..791031c1444e 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -395,7 +395,7 @@ def test_constructor_explicit(self): self.assertTrue(iterator._more_results) def test__build_protobuf_empty(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query client = _Client(None) @@ -407,7 +407,7 @@ def test__build_protobuf_empty(self): self.assertEqual(pb, expected_pb) def test__build_protobuf_all_values(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query client = _Client(None) @@ -435,7 +435,7 @@ def test__build_protobuf_all_values(self): self.assertEqual(pb, expected_pb) def test__process_query_results(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -458,7 +458,7 @@ def test__process_query_results(self): self.assertTrue(iterator._more_results) def test__process_query_results_done(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -489,9 +489,9 @@ def test__process_query_results_bad_enum(self): def _next_page_helper(self, txn_id=None): from google.api_core import page_iterator - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query more_enum = query_pb2.QueryResultBatch.NOT_FINISHED @@ -568,7 +568,7 @@ def _call_fut(self, query): return _pb_from_query(query) def test_empty(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 pb = self._call_fut(_Query()) self.assertEqual(list(pb.projection), []) @@ -596,7 +596,7 @@ def test_kind(self): def test_ancestor(self): from 
google.cloud.datastore.key import Key - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 ancestor = Key('Ancestor', 123, project='PROJECT') pb = self._call_fut(_Query(ancestor=ancestor)) @@ -609,7 +609,7 @@ def test_ancestor(self): self.assertEqual(pfilter.value.key_value, ancestor_pb) def test_filter(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 query = _Query(filters=[('name', '=', u'John')]) query.OPERATORS = { @@ -625,7 +625,7 @@ def test_filter(self): def test_filter_key(self): from google.cloud.datastore.key import Key - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 key = Key('Kind', 123, project='PROJECT') query = _Query(filters=[('__key__', '=', key)]) @@ -642,7 +642,7 @@ def test_filter_key(self): self.assertEqual(pfilter.value.key_value, key_pb) def test_order(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 pb = self._call_fut(_Query(order=['a', '-b', 'c'])) self.assertEqual([item.property.name for item in pb.order], @@ -696,7 +696,7 @@ def current_transaction(self): def _make_entity(kind, id_, project): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project @@ -708,8 +708,8 @@ def _make_entity(kind, id_, project): def _make_query_response( entity_pbs, cursor_as_bytes, more_results_enum, skipped_results): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import query_pb2 return datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 5479fbf80812..2c72f01dc34a 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -40,7 +40,7 @@ def test_ctor_defaults(self): self.assertEqual(len(xact._partial_key_entities), 0) def test_current(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' id_ = 678 @@ -130,7 +130,7 @@ def test_rollback(self): ds_api.begin_transaction.assert_called_once_with(project) def test_commit_no_partial_keys(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' id_ = 1002930 @@ -147,7 +147,7 @@ def test_commit_no_partial_keys(self): ds_api.begin_transaction.assert_called_once_with(project) def test_commit_w_partial_keys(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' kind = 'KIND' @@ -170,7 +170,7 @@ def test_commit_w_partial_keys(self): ds_api.begin_transaction.assert_called_once_with(project) def test_context_manager_no_raise(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' id_ = 912830 @@ -214,7 +214,7 @@ class Foo(Exception): def _make_key(kind, id_, project): - from google.cloud.proto.datastore.v1 
import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project @@ -271,7 +271,7 @@ def __exit__(self, *args): def _make_commit_response(*keys): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 mutation_results = [ datastore_pb2.MutationResult(key=key) for key in keys] From 98302b6e2e3dc225d7c59735d0f969f341160cf1 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 8 Nov 2017 21:51:38 -0800 Subject: [PATCH 149/611] Allow specifying read consistency (#4343) * Closes #4340 - Specify Read Consistency * review changes * merge conflicts; correct imports --- .../google/cloud/datastore/client.py | 79 ++++++++----------- .../google/cloud/datastore/helpers.py | 42 ++++++++-- .../google/cloud/datastore/query.py | 66 ++++++++++------ .../tests/unit/test_client.py | 35 -------- .../tests/unit/test_helpers.py | 35 ++++++++ 5 files changed, 146 insertions(+), 111 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index ec522cc5c1cc..d1091e119a67 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -15,23 +15,21 @@ import os -from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 - from google.cloud._helpers import _LocalStack -from google.cloud._helpers import ( - _determine_default_project as _base_default_project) +from google.cloud._helpers import (_determine_default_project as + _base_default_project) from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.environment_vars import GCD_DATASET -from google.cloud.environment_vars import GCD_HOST - -from google.cloud.datastore._http import HTTPDatastoreAPI from google.cloud.datastore import helpers +from google.cloud.datastore._http import HTTPDatastoreAPI from google.cloud.datastore.batch import Batch from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query from google.cloud.datastore.transaction import Transaction +from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.environment_vars import GCD_DATASET +from google.cloud.environment_vars import GCD_HOST + try: from google.cloud.datastore._gax import make_datastore_api _HAVE_GRPC = True @@ -131,7 +129,7 @@ def _extended_lookup(datastore_api, project, key_pbs, results = [] loop_num = 0 - read_options = _get_read_options(eventual, transaction_id) + read_options = helpers.get_read_options(eventual, transaction_id) while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( @@ -279,7 +277,8 @@ def current_transaction(self): if isinstance(transaction, Transaction): return transaction - def get(self, key, missing=None, deferred=None, transaction=None): + def get(self, key, missing=None, deferred=None, + transaction=None, eventual=False): """Retrieve an entity from a single key (if it exists). .. note:: @@ -305,15 +304,26 @@ def get(self, key, missing=None, deferred=None, transaction=None): :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. + :type eventual: bool + :param eventual: (Optional) Defaults to strongly consistent (False). 
+ Setting True will use eventual consistency, but cannot + be used inside a transaction or will raise ValueError. + :rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType`` :returns: The requested entity if it exists. + + :raises: :class:`ValueError` if eventual is True and in a transaction. """ - entities = self.get_multi(keys=[key], missing=missing, - deferred=deferred, transaction=transaction) + entities = self.get_multi(keys=[key], + missing=missing, + deferred=deferred, + transaction=transaction, + eventual=eventual) if entities: return entities[0] - def get_multi(self, keys, missing=None, deferred=None, transaction=None): + def get_multi(self, keys, missing=None, deferred=None, + transaction=None, eventual=False): """Retrieve entities, along with their attributes. :type keys: list of :class:`google.cloud.datastore.key.Key` @@ -334,10 +344,17 @@ def get_multi(self, keys, missing=None, deferred=None, transaction=None): :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. + :type eventual: bool + :param eventual: (Optional) Defaults to strongly consistent (False). + Setting True will use eventual consistency, + but cannot be used inside a transaction or + will raise ValueError. + :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. :raises: :class:`ValueError` if one or more of ``keys`` has a project which does not match our project. + :raises: :class:`ValueError` if eventual is True and in a transaction. """ if not keys: return [] @@ -353,7 +370,8 @@ def get_multi(self, keys, missing=None, deferred=None, transaction=None): entity_pbs = _extended_lookup( datastore_api=self._datastore_api, project=self.project, - key_pbs=[k.to_protobuf() for k in keys], + key_pbs=[key.to_protobuf() for key in keys], + eventual=eventual, missing=missing, deferred=deferred, transaction_id=transaction and transaction.id, @@ -581,34 +599,3 @@ def do_something(entity): if 'namespace' not in kwargs: kwargs['namespace'] = self.namespace return Query(self, **kwargs) - - -def _get_read_options(eventual, transaction_id): - """Validate rules for read options, and assign to the request. - - Helper method for ``lookup()`` and ``run_query``. - - :type eventual: bool - :param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG`` - consistency should be used. - - :type transaction_id: bytes - :param transaction_id: A transaction identifier (may be null). - - :rtype: :class:`.datastore_pb2.ReadOptions` - :returns: The read options corresponding to the inputs. - :raises: :class:`ValueError` if ``eventual`` is ``True`` and the - ``transaction_id`` is not ``None``. 
- """ - if transaction_id is None: - if eventual: - return _datastore_pb2.ReadOptions( - read_consistency=_datastore_pb2.ReadOptions.EVENTUAL) - else: - return _datastore_pb2.ReadOptions() - else: - if eventual: - raise ValueError('eventual must be False when in a transaction') - else: - return _datastore_pb2.ReadOptions( - transaction=transaction_id) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 11e21aa46da0..942819403a7f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -19,17 +19,18 @@ import datetime import itertools - -from google.protobuf import struct_pb2 -from google.type import latlng_pb2 import six from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_datetime -from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key +from google.protobuf import struct_pb2 +from google.type import latlng_pb2 + def _get_meaning(value_pb, is_list=False): """Get the meaning from a protobuf value. @@ -204,7 +205,7 @@ def entity_to_protobuf(entity): :rtype: :class:`.entity_pb2.Entity` :returns: The protobuf representing the entity. """ - entity_pb = _entity_pb2.Entity() + entity_pb = entity_pb2.Entity() if entity.key is not None: key_pb = entity.key.to_protobuf() entity_pb.key.CopyFrom(key_pb) @@ -233,6 +234,37 @@ def entity_to_protobuf(entity): return entity_pb +def get_read_options(eventual, transaction_id): + """Validate rules for read options, and assign to the request. + + Helper method for ``lookup()`` and ``run_query``. + + :type eventual: bool + :param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG`` + consistency should be used. + + :type transaction_id: bytes + :param transaction_id: A transaction identifier (may be null). + + :rtype: :class:`.datastore_pb2.ReadOptions` + :returns: The read options corresponding to the inputs. + :raises: :class:`ValueError` if ``eventual`` is ``True`` and the + ``transaction_id`` is not ``None``. + """ + if transaction_id is None: + if eventual: + return datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.EVENTUAL) + else: + return datastore_pb2.ReadOptions() + else: + if eventual: + raise ValueError('eventual must be False when in a transaction') + else: + return datastore_pb2.ReadOptions( + transaction=transaction_id) + + def key_from_protobuf(pb): """Factory method for creating a key based on a protobuf. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 28febdd1d422..ea16d4ee0690 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -19,19 +19,18 @@ from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list -from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 -from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 -from google.cloud.datastore_v1.proto import query_pb2 as _query_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key -_NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED +_NOT_FINISHED = query_pb2.QueryResultBatch.NOT_FINISHED _FINISHED = ( - _query_pb2.QueryResultBatch.NO_MORE_RESULTS, - _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT, - _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR, + query_pb2.QueryResultBatch.NO_MORE_RESULTS, + query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT, + query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR, ) @@ -81,11 +80,11 @@ class Query(object): """ OPERATORS = { - '<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL, - '>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL, - '<': _query_pb2.PropertyFilter.LESS_THAN, - '>': _query_pb2.PropertyFilter.GREATER_THAN, - '=': _query_pb2.PropertyFilter.EQUAL, + '<=': query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL, + '>=': query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL, + '<': query_pb2.PropertyFilter.LESS_THAN, + '>': query_pb2.PropertyFilter.GREATER_THAN, + '=': query_pb2.PropertyFilter.EQUAL, } """Mapping of operator strings and their protobuf equivalents.""" @@ -331,7 +330,7 @@ def distinct_on(self, value): self._distinct_on[:] = value def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, - client=None): + client=None, eventual=False): """Execute the Query; return an iterator for the matching entities. For example:: @@ -358,18 +357,28 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, :param end_cursor: (Optional) cursor passed through to the iterator. :type client: :class:`google.cloud.datastore.client.Client` - :param client: client used to connect to datastore. + :param client: (Optional) client used to connect to datastore. If not supplied, uses the query's value. + :type eventual: bool + :param eventual: (Optional) Defaults to strongly consistent (False). + Setting True will use eventual consistency, + but cannot be used inside a transaction or + will raise ValueError. + :rtype: :class:`Iterator` :returns: The iterator for the query. """ if client is None: client = self._client - return Iterator( - self, client, limit=limit, offset=offset, - start_cursor=start_cursor, end_cursor=end_cursor) + return Iterator(self, + client, + limit=limit, + offset=offset, + start_cursor=start_cursor, + end_cursor=end_cursor, + eventual=eventual) class Iterator(page_iterator.Iterator): @@ -396,18 +405,25 @@ class Iterator(page_iterator.Iterator): :type end_cursor: bytes :param end_cursor: (Optional) Cursor to end paging through query results. + + :type eventual: bool + :param eventual: (Optional) Defaults to strongly consistent (False). 
+ Setting True will use eventual consistency, + but cannot be used inside a transaction or + will raise ValueError. """ next_page_token = None def __init__(self, query, client, limit=None, offset=None, - start_cursor=None, end_cursor=None): + start_cursor=None, end_cursor=None, eventual=False): super(Iterator, self).__init__( client=client, item_to_value=_item_to_entity, page_token=start_cursor, max_results=limit) self._query = query self._offset = offset self._end_cursor = end_cursor + self._eventual = eventual # The attributes below will change over the life of the iterator. self._more_results = True self._skipped_results = 0 @@ -483,12 +499,12 @@ def _next_page(self): query_pb = self._build_protobuf() transaction = self.client.current_transaction if transaction is None: - read_options = _datastore_pb2.ReadOptions() + transaction_id = None else: - read_options = _datastore_pb2.ReadOptions( - transaction=transaction.id) + transaction_id = transaction.id + read_options = helpers.get_read_options(self._eventual, transaction_id) - partition_id = _entity_pb2.PartitionId( + partition_id = entity_pb2.PartitionId( project_id=self._query.project, namespace_id=self._query.namespace) response_pb = self.client._datastore_api.run_query( @@ -512,7 +528,7 @@ def _pb_from_query(query): it does not contain "in-flight" fields for ongoing query executions (cursors, offset, limit). """ - pb = _query_pb2.Query() + pb = query_pb2.Query() for projection_name in query.projection: pb.projection.add().property.name = projection_name @@ -521,7 +537,7 @@ def _pb_from_query(query): pb.kind.add().name = query.kind composite_filter = pb.filter.composite_filter - composite_filter.op = _query_pb2.CompositeFilter.AND + composite_filter.op = query_pb2.CompositeFilter.AND if query.ancestor: ancestor_pb = query.ancestor.to_protobuf() @@ -529,7 +545,7 @@ def _pb_from_query(query): # Filter on __key__ HAS_ANCESTOR == ancestor. 
ancestor_filter = composite_filter.filters.add().property_filter ancestor_filter.property.name = '__key__' - ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR + ancestor_filter.op = query_pb2.PropertyFilter.HAS_ANCESTOR ancestor_filter.value.key_value.CopyFrom(ancestor_pb) for property_name, operator, value in query.filters: diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 51b3e2651531..53a32f59252b 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -1033,41 +1033,6 @@ def test_query_w_namespace_collision(self): client, project=self.PROJECT, namespace=namespace2, kind=kind) -class Test__get_read_options(unittest.TestCase): - - def _call_fut(self, eventual, transaction_id): - from google.cloud.datastore.client import _get_read_options - - return _get_read_options(eventual, transaction_id) - - def test_eventual_w_transaction(self): - with self.assertRaises(ValueError): - self._call_fut(True, b'123') - - def test_eventual_wo_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - - read_options = self._call_fut(True, None) - expected = datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.EVENTUAL) - self.assertEqual(read_options, expected) - - def test_default_w_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - - txn_id = b'123abc-easy-as' - read_options = self._call_fut(False, txn_id) - expected = datastore_pb2.ReadOptions(transaction=txn_id) - self.assertEqual(read_options, expected) - - def test_default_wo_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - - read_options = self._call_fut(False, None) - expected = datastore_pb2.ReadOptions() - self.assertEqual(read_options, expected) - - class _NoCommitBatch(object): def __init__(self, client): diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 5e91de0196f4..3624665a2a05 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -498,6 +498,41 @@ def test_w_nothing_in_pb(self): self.assertRaises(ValueError, self._call_fut, pb) +class Test__get_read_options(unittest.TestCase): + + def _call_fut(self, eventual, transaction_id): + from google.cloud.datastore.helpers import get_read_options + + return get_read_options(eventual, transaction_id) + + def test_eventual_w_transaction(self): + with self.assertRaises(ValueError): + self._call_fut(True, b'123') + + def test_eventual_wo_transaction(self): + from google.cloud.datastore_v1.proto import datastore_pb2 + + read_options = self._call_fut(True, None) + expected = datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.EVENTUAL) + self.assertEqual(read_options, expected) + + def test_default_w_transaction(self): + from google.cloud.datastore_v1.proto import datastore_pb2 + + txn_id = b'123abc-easy-as' + read_options = self._call_fut(False, txn_id) + expected = datastore_pb2.ReadOptions(transaction=txn_id) + self.assertEqual(read_options, expected) + + def test_default_wo_transaction(self): + from google.cloud.datastore_v1.proto import datastore_pb2 + + read_options = self._call_fut(False, None) + expected = datastore_pb2.ReadOptions() + self.assertEqual(read_options, expected) + + class Test__pb_attr_value(unittest.TestCase): def 
_call_fut(self, val): From f53f740353a01ad21ffc20795f8b4b0661e9aba1 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Thu, 9 Nov 2017 14:06:08 -0800 Subject: [PATCH 150/611] Cosmetic changes to address #4343 (#4376) --- .../google/cloud/datastore/client.py | 9 ++++----- .../google/cloud/datastore/helpers.py | 6 +++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index d1091e119a67..fc4a107022ee 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -167,10 +167,10 @@ class Client(ClientWithProject): >>> client = datastore.Client() :type project: str - :param project: (optional) The project to pass to proxied API methods. + :param project: (Optional) The project to pass to proxied API methods. :type namespace: str - :param namespace: (optional) namespace to pass to proxied API methods. + :param namespace: (Optional) namespace to pass to proxied API methods. :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this @@ -346,9 +346,8 @@ def get_multi(self, keys, missing=None, deferred=None, :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). - Setting True will use eventual consistency, - but cannot be used inside a transaction or - will raise ValueError. + Setting True will use eventual consistency, but cannot + be used inside a transaction or will raise ValueError. :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 942819403a7f..f3838668fe3d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -19,6 +19,9 @@ import datetime import itertools + +from google.protobuf import struct_pb2 +from google.type import latlng_pb2 import six from google.cloud._helpers import _datetime_to_pb_timestamp @@ -28,9 +31,6 @@ from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key -from google.protobuf import struct_pb2 -from google.type import latlng_pb2 - def _get_meaning(value_pb, is_list=False): """Get the meaning from a protobuf value. From 5bdd49f096ac31c13f57133a8ca3523529693864 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C4=8Camaj?= Date: Fri, 10 Nov 2017 17:42:19 +0100 Subject: [PATCH 151/611] Set next_page_token to None if there are no more results (#4349) * Update _process_query_results_done Datastore test Response ``end_cursor`` might be non-empty, as reported in #4347. * Set next_page_token to None using MoreResults enum (#4347) Checking for an empty value of ``response_pb.batch.end_cursor`` is not reliable, as the behaviour is not documented. It's better to check the API response field ``moreResults``; see https://cloud.google.com/datastore/docs/reference/rpc/google.datastore.v1#google.datastore.v1.QueryResultBatch.
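In code, the intended check looks roughly like the following sketch (it simply mirrors the hunk below; ``response_pb`` is assumed to be a ``datastore_pb2.RunQueryResponse``):

```python
import base64

from google.cloud.datastore_v1.proto import query_pb2

_NO_MORE_RESULTS = query_pb2.QueryResultBatch.NO_MORE_RESULTS


def _next_page_token(response_pb):
    # Decide whether paging is done from the documented ``more_results``
    # enum; an empty ``end_cursor`` is not a reliable signal.
    if response_pb.batch.more_results == _NO_MORE_RESULTS:
        return None
    return base64.urlsafe_b64encode(response_pb.batch.end_cursor)
```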
--- .../google-cloud-datastore/google/cloud/datastore/query.py | 5 +++-- packages/google-cloud-datastore/tests/unit/test_query.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index ea16d4ee0690..5d968562f421 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -26,9 +26,10 @@ _NOT_FINISHED = query_pb2.QueryResultBatch.NOT_FINISHED +_NO_MORE_RESULTS = query_pb2.QueryResultBatch.NO_MORE_RESULTS _FINISHED = ( - query_pb2.QueryResultBatch.NO_MORE_RESULTS, + _NO_MORE_RESULTS, query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT, query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR, ) @@ -470,7 +471,7 @@ def _process_query_results(self, response_pb): """ self._skipped_results = response_pb.batch.skipped_results - if response_pb.batch.end_cursor == b'': # Empty-value for bytes. + if response_pb.batch.more_results == _NO_MORE_RESULTS: self.next_page_token = None else: self.next_page_token = base64.urlsafe_b64encode( diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 791031c1444e..70945895ba99 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -467,7 +467,7 @@ def test__process_query_results_done(self): entity_pbs = [ _make_entity('World', 1234, 'PROJECT'), ] - cursor_as_bytes = b'' + cursor_as_bytes = b'\x9ai\xe7' skipped_results = 44 more_results_enum = query_pb2.QueryResultBatch.NO_MORE_RESULTS response_pb = _make_query_response( From 66ad0871969dd6d28b02afd7c4bf5c7e6533573c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sat, 11 Nov 2017 14:23:24 -0800 Subject: [PATCH 152/611] Update datastore doctests to reflect change in cursor behavior. (#4382) Follow up to #4349. --- .../google-cloud-datastore/google/cloud/datastore/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index fc4a107022ee..a3d1f9d43d29 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -580,8 +580,8 @@ def do_something(entity): >>> >>> first_page = next(pages) >>> first_page_entities = list(first_page) - >>> query_iter.next_page_token - b'...' + >>> query_iter.next_page_token is None + True :type kwargs: dict :param kwargs: Parameters for initializing and instance of From 91bd34570a98ba7f7bdd1f66b8f0aba36efbb459 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 13 Nov 2017 10:14:47 -0800 Subject: [PATCH 153/611] Updating datastore HTTP wrapper. (#4388) - Making the method arguments (both in position and name) match the arguments for `google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient` - Passing positional as positional and keyword as keyword when using the low-level API client in `Client.get()` (was previously using all keyword arguments to `.lookup()`, which caused this issue) - Updating mock call assertions to match the change in calling behavior. Fixes #4387. 
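As a sketch of the calling convention this patch settles on (the wrapper function and variable names here are hypothetical; only the ``lookup`` signature comes from the diff below):

```python
def lookup_keys(datastore_api, project_id, keys, read_options=None):
    """Look up entities via either the HTTP wrapper or the GAPIC client.

    Both are assumed to share the updated signature
    ``lookup(project_id, keys, read_options=None)``.
    """
    key_pbs = [key.to_protobuf() for key in keys]
    # Pass positional arguments positionally and keyword arguments by
    # keyword; only the optional ``read_options`` stays a keyword. The
    # previous all-keyword call (``project_id=..., keys=...``) did not
    # match the HTTP wrapper's old parameter names, which is the
    # mismatch this patch fixes by aligning both signatures.
    return datastore_api.lookup(project_id, key_pbs, read_options=read_options)
```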
--- .../google/cloud/datastore/_http.py | 96 ++++++++++--------- .../google/cloud/datastore/client.py | 4 +- .../tests/unit/test__http.py | 17 ++-- .../tests/unit/test_client.py | 32 +++---- 4 files changed, 77 insertions(+), 72 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index a161b9b096c0..ef02bbbff4ce 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -148,48 +148,47 @@ class HTTPDatastoreAPI(object): def __init__(self, client): self.client = client - def lookup(self, project, read_options, key_pbs): + def lookup(self, project_id, keys, read_options=None): """Perform a ``lookup`` request. - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type project_id: str + :param project_id: The project to connect to. This is + usually your project name in the cloud console. + + :type keys: List[.entity_pb2.Key] + :param keys: The keys to retrieve from the datastore. :type read_options: :class:`.datastore_pb2.ReadOptions` - :param read_options: The options for this lookup. Contains a + :param read_options: (Optional) The options for this lookup. Contains either the transaction for the read or ``STRONG`` or ``EVENTUAL`` read consistency. - :type key_pbs: list of - :class:`.entity_pb2.Key` - :param key_pbs: The keys to retrieve from the datastore. - :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.LookupRequest( - project_id=project, + project_id=project_id, read_options=read_options, - keys=key_pbs, + keys=keys, ) - return _rpc(self.client._http, project, 'lookup', + return _rpc(self.client._http, project_id, 'lookup', self.client._base_url, request_pb, _datastore_pb2.LookupResponse) - def run_query(self, project, partition_id, read_options, + def run_query(self, project_id, partition_id, read_options=None, query=None, gql_query=None): """Perform a ``runQuery`` request. - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type project_id: str + :param project_id: The project to connect to. This is + usually your project name in the cloud console. :type partition_id: :class:`.entity_pb2.PartitionId` :param partition_id: Partition ID corresponding to an optional namespace and project ID. :type read_options: :class:`.datastore_pb2.ReadOptions` - :param read_options: The options for this query. Contains a + :param read_options: (Optional) The options for this query. Contains either the transaction for the read or ``STRONG`` or ``EVENTUAL`` read consistency. @@ -205,37 +204,40 @@ def run_query(self, project, partition_id, read_options, :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.RunQueryRequest( - project_id=project, + project_id=project_id, partition_id=partition_id, read_options=read_options, query=query, gql_query=gql_query, ) - return _rpc(self.client._http, project, 'runQuery', + return _rpc(self.client._http, project_id, 'runQuery', self.client._base_url, request_pb, _datastore_pb2.RunQueryResponse) - def begin_transaction(self, project): + def begin_transaction(self, project_id, transaction_options=None): """Perform a ``beginTransaction`` request. - :type project: str - :param project: The project to connect to. 
This is - usually your project name in the cloud console. + :type project_id: str + :param project_id: The project to connect to. This is + usually your project name in the cloud console. + + :type transaction_options: ~.datastore_v1.types.TransactionOptions + :param transaction_options: (Optional) Options for a new transaction. :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.BeginTransactionRequest() - return _rpc(self.client._http, project, 'beginTransaction', + return _rpc(self.client._http, project_id, 'beginTransaction', self.client._base_url, request_pb, _datastore_pb2.BeginTransactionResponse) - def commit(self, project, mode, mutations, transaction=None): + def commit(self, project_id, mode, mutations, transaction=None): """Perform a ``commit`` request. - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type project_id: str + :param project_id: The project to connect to. This is + usually your project name in the cloud console. :type mode: :class:`.gapic.datastore.v1.enums.CommitRequest.Mode` :param mode: The type of commit to perform. Expected to be one of @@ -254,51 +256,51 @@ def commit(self, project, mode, mutations, transaction=None): :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.CommitRequest( - project_id=project, + project_id=project_id, mode=mode, transaction=transaction, mutations=mutations, ) - return _rpc(self.client._http, project, 'commit', + return _rpc(self.client._http, project_id, 'commit', self.client._base_url, request_pb, _datastore_pb2.CommitResponse) - def rollback(self, project, transaction_id): + def rollback(self, project_id, transaction): """Perform a ``rollback`` request. - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type project_id: str + :param project_id: The project to connect to. This is + usually your project name in the cloud console. - :type transaction_id: bytes - :param transaction_id: The transaction ID to rollback. + :type transaction: bytes + :param transaction: The transaction ID to rollback. :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.RollbackRequest( - project_id=project, - transaction=transaction_id, + project_id=project_id, + transaction=transaction, ) # Response is empty (i.e. no fields) but we return it anyway. - return _rpc(self.client._http, project, 'rollback', + return _rpc(self.client._http, project_id, 'rollback', self.client._base_url, request_pb, _datastore_pb2.RollbackResponse) - def allocate_ids(self, project, key_pbs): + def allocate_ids(self, project_id, keys): """Perform an ``allocateIds`` request. - :type project: str - :param project: The project to connect to. This is - usually your project name in the cloud console. + :type project_id: str + :param project_id: The project to connect to. This is + usually your project name in the cloud console. - :type key_pbs: list of :class:`.entity_pb2.Key` - :param key_pbs: The keys for which the backend should allocate IDs. + :type keys: List[.entity_pb2.Key] + :param keys: The keys for which the backend should allocate IDs. :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. 
""" - request_pb = _datastore_pb2.AllocateIdsRequest(keys=key_pbs) - return _rpc(self.client._http, project, 'allocateIds', + request_pb = _datastore_pb2.AllocateIdsRequest(keys=keys) + return _rpc(self.client._http, project_id, 'allocateIds', self.client._base_url, request_pb, _datastore_pb2.AllocateIdsResponse) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index a3d1f9d43d29..1b8cb2ae51bb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -133,9 +133,9 @@ def _extended_lookup(datastore_api, project, key_pbs, while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( - project_id=project, + project, + key_pbs, read_options=read_options, - keys=key_pbs, ) # Accumulate the new results. diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index cceb40419a56..9610e966d419 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -155,7 +155,7 @@ def test_lookup_single_key_empty_response(self): # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb]) + response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) @@ -186,7 +186,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb]) + response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) @@ -217,7 +217,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb]) + response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) @@ -251,7 +251,7 @@ def test_lookup_single_key_nonempty_response(self): # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb]) + response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) @@ -285,7 +285,8 @@ def test_lookup_multiple_keys_empty_response(self): # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb1, key_pb2]) + response = ds_api.lookup( + project, [key_pb1, key_pb2], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) @@ -320,7 +321,8 @@ def test_lookup_multiple_keys_w_missing(self): # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb1, key_pb2]) + response = ds_api.lookup( + project, [key_pb1, key_pb2], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) @@ -354,7 +356,8 @@ def test_lookup_multiple_keys_w_deferred(self): # Make request. 
ds_api = self._make_one(client) - response = ds_api.lookup(project, read_options, [key_pb1, key_pb2]) + response = ds_api.lookup( + project, [key_pb1, key_pb2], read_options=read_options) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 53a32f59252b..ded960b5a650 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -352,9 +352,9 @@ def test_get_multi_miss(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - project_id=self.PROJECT, + self.PROJECT, + [key.to_protobuf()], read_options=read_options, - keys=[key.to_protobuf()], ) def test_get_multi_miss_w_missing(self): @@ -389,9 +389,9 @@ def test_get_multi_miss_w_missing(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - project_id=self.PROJECT, + self.PROJECT, + [key_pb], read_options=read_options, - keys=[key_pb], ) def test_get_multi_w_missing_non_empty(self): @@ -438,9 +438,9 @@ def test_get_multi_miss_w_deferred(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - project_id=self.PROJECT, + self.PROJECT, + [key_pb], read_options=read_options, - keys=[key_pb], ) def test_get_multi_w_deferred_from_backend_but_not_passed(self): @@ -488,14 +488,14 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): self.assertEqual(ds_api.lookup.call_count, 2) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_any_call( - project_id=self.PROJECT, + self.PROJECT, + [key2_pb], read_options=read_options, - keys=[key2_pb], ) ds_api.lookup.assert_any_call( - project_id=self.PROJECT, + self.PROJECT, + [key1_pb, key2_pb], read_options=read_options, - keys=[key1_pb, key2_pb], ) def test_get_multi_hit(self): @@ -529,8 +529,8 @@ def test_get_multi_hit(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - keys=[key.to_protobuf()], - project_id=self.PROJECT, + self.PROJECT, + [key.to_protobuf()], read_options=read_options, ) @@ -568,8 +568,8 @@ def test_get_multi_hit_w_transaction(self): read_options = datastore_pb2.ReadOptions(transaction=txn_id) ds_api.lookup.assert_called_once_with( - project_id=self.PROJECT, - keys=[key.to_protobuf()], + self.PROJECT, + [key.to_protobuf()], read_options=read_options, ) @@ -605,9 +605,9 @@ def test_get_multi_hit_multiple_keys_same_project(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - project_id=self.PROJECT, + self.PROJECT, + [key1.to_protobuf(), key2.to_protobuf()], read_options=read_options, - keys=[key1.to_protobuf(), key2.to_protobuf()], ) def test_get_multi_hit_multiple_keys_different_project(self): From e144d1d88c24a3a71398b8ace22d7ed2070085df Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Mon, 11 Dec 2017 09:51:16 -0800 Subject: [PATCH 154/611] Closes #4278 Datastore: Transaction Options (#4357) --- .../google/cloud/datastore/client.py | 10 +++++-- .../google/cloud/datastore/transaction.py | 30 ++++++++++++++++++- .../tests/system/test_system.py | 1 + .../tests/unit/test_client.py | 16 ++++++++++ .../tests/unit/test_transaction.py | 29 +++++++++++++++++- 5 files changed, 81 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 
1b8cb2ae51bb..b53e2deb54f3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -511,9 +511,13 @@ def batch(self): """Proxy to :class:`google.cloud.datastore.batch.Batch`.""" return Batch(self) - def transaction(self): - """Proxy to :class:`google.cloud.datastore.transaction.Transaction`.""" - return Transaction(self) + def transaction(self, **kwargs): + """Proxy to :class:`google.cloud.datastore.transaction.Transaction`. + + :type kwargs: dict + :param kwargs: Keyword arguments to be passed in. + """ + return Transaction(self, **kwargs) def query(self, **kwargs): """Proxy to :class:`google.cloud.datastore.query.Query`. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index b10aa2a2e64e..8fa71db25d54 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -15,6 +15,7 @@ """Create / interact with Google Cloud Datastore transactions.""" from google.cloud.datastore.batch import Batch +from google.cloud.datastore_v1.types import TransactionOptions class Transaction(Batch): @@ -152,13 +153,22 @@ def Entity(*args, **kwargs): :type client: :class:`google.cloud.datastore.client.Client` :param client: the client used to connect to datastore. + + :type read_only: bool + :param read_only: indicates the transaction is read only. """ _status = None - def __init__(self, client): + def __init__(self, client, read_only=False): super(Transaction, self).__init__(client) self._id = None + if read_only: + options = TransactionOptions( + read_only=TransactionOptions.ReadOnly()) + else: + options = TransactionOptions() + self._options = options @property def id(self): @@ -234,3 +244,21 @@ def commit(self): finally: # Clear our own ID in case this gets accidentally reused. self._id = None + + def put(self, entity): + """Adds an entity to be committed. + + Ensures the transaction is not marked readonly. + Please see documentation at + :meth:`~google.cloud.datastore.batch.Batch.put` + + :type entity: :class:`~google.cloud.datastore.entity.Entity` + :param entity: the entity to be saved. 
+ + :raises: :class:`RuntimeError` if the transaction + is marked ReadOnly + """ + if self._options.HasField('read_only'): + raise RuntimeError("Transaction is read only") + else: + super(Transaction, self).put(entity) diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index a5e46c6dd8da..3ab7295f50c4 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -22,6 +22,7 @@ from google.cloud._helpers import UTC from google.cloud import datastore from google.cloud.datastore.helpers import GeoPoint +from google.cloud.datastore_v1 import types from google.cloud.environment_vars import GCD_DATASET from google.cloud.exceptions import Conflict diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index ded960b5a650..949753e75f3b 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -934,6 +934,22 @@ def test_transaction_defaults(self): self.assertIs(xact, mock_klass.return_value) mock_klass.assert_called_once_with(client) + def test_read_only_transaction_defaults(self): + from google.cloud.datastore.transaction import Transaction + from google.cloud.datastore_v1.types import TransactionOptions + creds = _make_credentials() + client = self._make_one(credentials=creds) + xact = client.transaction(read_only=True) + self.assertEqual(xact._options, + TransactionOptions( + read_only=TransactionOptions.ReadOnly() + ) + ) + self.assertFalse(xact._options.HasField("read_write")) + self.assertTrue(xact._options.HasField("read_only")) + self.assertEqual(xact._options.read_only, + TransactionOptions.ReadOnly()) + def test_query_w_client(self): KIND = 'KIND' diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 2c72f01dc34a..cef178a00243 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -22,12 +22,18 @@ class TestTransaction(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.datastore.transaction import Transaction - return Transaction + def _get_options_class(self, **kw): + from google.cloud.datastore_v1.types import TransactionOptions + return TransactionOptions + def _make_one(self, client, **kw): return self._get_target_class()(client, **kw) + def _make_options(self, **kw): + return self._get_options_class()(**kw) + def test_ctor_defaults(self): project = 'PROJECT' client = _Client(project) @@ -212,6 +218,27 @@ class Foo(Exception): self.assertIsNone(xact.id) self.assertEqual(ds_api.begin_transaction.call_count, 1) + def test_constructor_read_only(self): + project = 'PROJECT' + id_ = 850302 + ds_api = _make_datastore_api(xact=id_) + client = _Client(project, datastore_api=ds_api) + read_only = self._get_options_class().ReadOnly() + options = self._make_options(read_only=read_only) + xact = self._make_one(client, read_only=True) + self.assertEqual(xact._options, options) + + def test_put_read_only(self): + project = 'PROJECT' + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + entity = _Entity() + xact = self._make_one(client, read_only=True) + xact.begin() + with self.assertRaises(RuntimeError): + 
xact.put(entity) + def _make_key(kind, id_, project): from google.cloud.datastore_v1.proto import entity_pb2 From c8d65303cbe4e2393c806ce09ff932626482c708 Mon Sep 17 00:00:00 2001 From: egalpin Date: Wed, 20 Dec 2017 14:50:27 -0500 Subject: [PATCH 155/611] Adds optional location_prefix kwarg in to_legacy_urlsafe (#4635) --- .../google/cloud/datastore/key.py | 19 ++++++++++++++++--- .../tests/unit/test_key.py | 19 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 74d23e49265c..2824e57d3ebf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -298,7 +298,7 @@ def to_protobuf(self): return key - def to_legacy_urlsafe(self): + def to_legacy_urlsafe(self, location_prefix=None): """Convert to a base64 encode urlsafe string for App Engine. This is intended to work with the "legacy" representation of a @@ -310,13 +310,26 @@ def to_legacy_urlsafe(self): .. note:: The string returned by ``to_legacy_urlsafe`` is equivalent, but - not identical, to the string returned by ``ndb``. + not identical, to the string returned by ``ndb``. The location + prefix may need to be specified to obtain identical urlsafe + keys. + + :type location_prefix: str + :param location_prefix: The location prefix of an App Engine project + ID. Often this value is 's~', but may also be + 'e~', or other location prefixes currently + unknown. :rtype: bytes :returns: A bytestring containing the key encoded as URL-safe base64. """ + if location_prefix is None: + project_id = self.project + else: + project_id = location_prefix + self.project + reference = _app_engine_key_pb2.Reference( - app=self.project, + app=project_id, path=_to_legacy_path(self._path), # Avoid the copy. name_space=self.namespace, ) diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index e95d756013cc..ddd04d161967 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -35,6 +35,9 @@ class TestKey(unittest.TestCase): _URLSAFE_EXAMPLE2 = b'agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA' _URLSAFE_APP2 = 's~fire' _URLSAFE_FLAT_PATH2 = ('Kind', 'Thing') + _URLSAFE_EXAMPLE3 = b'ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw' + _URLSAFE_APP3 = 'sample-app-no-location' + _URLSAFE_FLAT_PATH3 = ('Zorp', 88) @staticmethod def _get_target_class(): @@ -408,6 +411,13 @@ def test_to_legacy_urlsafe_strip_padding(self): # Make sure it started with base64 padding. self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) + def test_to_legacy_urlsafe_with_location_prefix(self): + key = self._make_one( + *self._URLSAFE_FLAT_PATH3, + project=self._URLSAFE_APP3) + urlsafe = key.to_legacy_urlsafe(location_prefix='s~') + self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE3) + def test_from_legacy_urlsafe(self): klass = self._get_target_class() key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE1) @@ -430,6 +440,15 @@ def test_from_legacy_urlsafe_needs_padding(self): self.assertIsNone(key.namespace) self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH2) + def test_from_legacy_urlsafe_with_location_prefix(self): + klass = self._get_target_class() + # Make sure it will have base64 padding added. 
+ key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE3) + + self.assertEqual(key.project, self._URLSAFE_APP3) + self.assertIsNone(key.namespace) + self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH3) + def test_is_partial_no_name_or_id(self): key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertTrue(key.is_partial) From b06674699424408fed30a31d97c90c075d7b8ca3 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 20 Dec 2017 15:12:29 -0800 Subject: [PATCH 156/611] Datastore: id from #3832 pull request with unit test (#4640) --- .../google/cloud/datastore/entity.py | 16 ++++++++++++++++ .../tests/unit/test_entity.py | 13 +++++++++++++ 2 files changed, 29 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 649da274aba3..0ddcc75a1ac1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -183,6 +183,22 @@ def kind(self): if self.key: return self.key.kind + @property + def id(self): + """Get the ID of the current entity. + + .. note:: + + This relies entirely on the :class:`google.cloud.datastore.key.Key` + set on the entity. That means that we're not storing the ID + of the entity at all, just the properties and a pointer to a + Key which knows its ID. + """ + if self.key is None: + return None + else: + return self.key.id + def __repr__(self): if self.key: return '' % (self.key._flat_path, diff --git a/packages/google-cloud-datastore/tests/unit/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py index f018a89fc7ea..6157f5075ea6 100644 --- a/packages/google-cloud-datastore/tests/unit/test_entity.py +++ b/packages/google-cloud-datastore/tests/unit/test_entity.py @@ -192,6 +192,19 @@ def test__eq__same_value_different_meanings(self): self.assertFalse(entity1 == entity2) + def test_id(self): + from google.cloud.datastore.key import Key + + key = Key(_KIND, _ID, project=_PROJECT) + entity = self._make_one(key=key) + self.assertEqual(entity.id, _ID) + + def test_id_none(self): + from google.cloud.datastore.key import Key + + entity = self._make_one(key=None) + self.assertEqual(entity.id, None) + def test___repr___no_key_empty(self): entity = self._make_one() self.assertEqual(repr(entity), '') From eb783aca0833482cc9efd58b1dfe50b229e059d8 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 20 Dec 2017 15:46:23 -0800 Subject: [PATCH 157/611] Datastore: Entity doc consistency (#4641) --- .../google/cloud/datastore/entity.py | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 0ddcc75a1ac1..6f76e869645d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -101,28 +101,28 @@ class Entity(dict): .. testsetup:: entity-dict - from google.cloud import datastore + from google.cloud import datastore - entity = datastore.Entity() - entity['age'] = 20 - entity['name'] = 'JJ' + entity = datastore.Entity() + entity['age'] = 20 + entity['name'] = 'JJ' .. doctest:: entity-dict - >>> sorted(entity.keys()) - ['age', 'name'] - >>> sorted(entity.items()) - [('age', 20), ('name', 'JJ')] + >>> sorted(entity.keys()) + ['age', 'name'] + >>> sorted(entity.items()) + [('age', 20), ('name', 'JJ')] .. 
note:: - When saving an entity to the backend, values which are "text" - (``unicode`` in Python2, ``str`` in Python3) will be saved using - the 'text_value' field, after being encoded to UTF-8. When - retrieved from the back-end, such values will be decoded to "text" - again. Values which are "bytes" (``str`` in Python2, ``bytes`` in - Python3), will be saved using the 'blob_value' field, without - any decoding / encoding step. + When saving an entity to the backend, values which are "text" + (``unicode`` in Python2, ``str`` in Python3) will be saved using + the 'text_value' field, after being encoded to UTF-8. When + retrieved from the back-end, such values will be decoded to "text" + again. Values which are "bytes" (``str`` in Python2, ``bytes`` in + Python3), will be saved using the 'blob_value' field, without + any decoding / encoding step. :type key: :class:`google.cloud.datastore.key.Key` :param key: Optional key to be set on entity. @@ -175,10 +175,11 @@ def kind(self): """Get the kind of the current entity. .. note:: - This relies entirely on the :class:`google.cloud.datastore.key.Key` - set on the entity. That means that we're not storing the kind - of the entity at all, just the properties and a pointer to a - Key which knows its Kind. + + This relies entirely on the :class:`google.cloud.datastore.key.Key` + set on the entity. That means that we're not storing the kind + of the entity at all, just the properties and a pointer to a + Key which knows its Kind. """ if self.key: return self.key.kind From 40be8b0fc05208875f7a37dfaf67e79623b1af47 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 5 Jan 2018 12:59:07 -0800 Subject: [PATCH 158/611] api_core: Make PageIterator.item_to_value public. (#4702) --- packages/google-cloud-datastore/google/cloud/datastore/query.py | 2 +- packages/google-cloud-datastore/tests/unit/test_query.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5d968562f421..6cb0810cd6d2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -515,7 +515,7 @@ def _next_page(self): query=query_pb, ) entity_pbs = self._process_query_results(response_pb) - return page_iterator.Page(self, entity_pbs, self._item_to_value) + return page_iterator.Page(self, entity_pbs, self.item_to_value) def _pb_from_query(query): diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 70945895ba99..3a7aa9300a56 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -361,7 +361,6 @@ def test_constructor_defaults(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) - self.assertIsNotNone(iterator._item_to_value) self.assertIsNone(iterator.max_results) self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token,) @@ -384,7 +383,6 @@ def test_constructor_explicit(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) - self.assertIsNotNone(iterator._item_to_value) self.assertEqual(iterator.max_results, limit) self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, start_cursor) From 971489e10aa49af8dcdb1b5b471a588f5f44919b Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott 
Date: Wed, 10 Jan 2018 16:54:48 -0800 Subject: [PATCH 159/611] Revert "api_core: Make PageIterator.item_to_value public. (#4702)" (#4731) --- packages/google-cloud-datastore/google/cloud/datastore/query.py | 2 +- packages/google-cloud-datastore/tests/unit/test_query.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 6cb0810cd6d2..5d968562f421 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -515,7 +515,7 @@ def _next_page(self): query=query_pb, ) entity_pbs = self._process_query_results(response_pb) - return page_iterator.Page(self, entity_pbs, self.item_to_value) + return page_iterator.Page(self, entity_pbs, self._item_to_value) def _pb_from_query(query): diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 3a7aa9300a56..70945895ba99 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -361,6 +361,7 @@ def test_constructor_defaults(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) + self.assertIsNotNone(iterator._item_to_value) self.assertIsNone(iterator.max_results) self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token,) @@ -383,6 +384,7 @@ def test_constructor_explicit(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) + self.assertIsNotNone(iterator._item_to_value) self.assertEqual(iterator.max_results, limit) self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, start_cursor) From 5137f145e648fc12bb88a648030d9fd6bbe84b78 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 14 Feb 2018 09:54:51 -0800 Subject: [PATCH 160/611] Regenerate gapic for datastore and remove gax. (#4877) * Regenerate gapic for datastore. 
Command log: ``` set DEST ~/workspace/google-cloud-python/datastore set GOOGLEAPIS_REV 4568cba600c72347542d7fa9c826c25584ef55a6 cd /tmp git clone https://github.com/googleapis/googleapis.git cd googleapis git reset --hard $GOOGLEAPIS_REV artman --config google/datastore/artman_datastore.yaml generate python_gapic set SRC ./artman-genfiles/python/datastore-v1 set IMPORT_PKG datastore_v1 cp -r $SRC/docs $DEST cp -r $SRC/google/cloud/$IMPORT_PKG $DEST/google/cloud/ mkdir -p $DEST/tests/unit/gapic cp -r $SRC/tests/unit/gapic $DEST/tests/unit ``` * Remove all references to gax --- packages/google-cloud-datastore/docs/conf.py | 310 ++++++++++++++++++ .../docs/gapic/v1/api.rst | 6 + .../docs/gapic/v1/types.rst | 5 + .../google-cloud-datastore/docs/index.rst | 84 +++++ .../google/cloud/datastore/_gapic.py | 52 +++ .../google/cloud/datastore/_gax.py | 240 -------------- .../google/cloud/datastore/client.py | 4 +- .../google/cloud/datastore_v1/__init__.py | 7 +- .../datastore_v1/gapic/datastore_client.py | 182 ++++++---- .../google/cloud/datastore_v1/gapic/enums.py | 4 +- .../cloud/datastore_v1/proto/datastore_pb2.py | 6 +- .../datastore_v1/proto/datastore_pb2_grpc.py | 2 +- .../cloud/datastore_v1/proto/entity_pb2.py | 6 +- .../cloud/datastore_v1/proto/query_pb2.py | 12 +- .../google/cloud/datastore_v1/types.py | 7 +- packages/google-cloud-datastore/setup.py | 6 +- .../unit/gapic/v1/test_datastore_client_v1.py | 305 +++++++++++++++++ .../tests/unit/test__gapic.py | 75 +++++ .../tests/unit/test__gax.py | 280 ---------------- .../tests/unit/test_client.py | 2 +- 20 files changed, 991 insertions(+), 604 deletions(-) create mode 100644 packages/google-cloud-datastore/docs/conf.py create mode 100644 packages/google-cloud-datastore/docs/gapic/v1/api.rst create mode 100644 packages/google-cloud-datastore/docs/gapic/v1/types.rst create mode 100644 packages/google-cloud-datastore/docs/index.rst create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_gapic.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore/_gax.py create mode 100644 packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py create mode 100644 packages/google-cloud-datastore/tests/unit/test__gapic.py delete mode 100644 packages/google-cloud-datastore/tests/unit/test__gax.py diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py new file mode 100644 index 000000000000..de92e20469fd --- /dev/null +++ b/packages/google-cloud-datastore/docs/conf.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-datastore documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.90.4' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings.
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-datastore' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-datastore-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'google-cloud-datastore.tex', + u'google-cloud-datastore Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-datastore', + u'google-cloud-datastore Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-datastore', + u'google-cloud-datastore Documentation', author, 'google-cloud-datastore', + 'GAPIC library for the {metadata.shortName} v1 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('http://python.readthedocs.org/en/latest/', None), + 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-datastore/docs/gapic/v1/api.rst b/packages/google-cloud-datastore/docs/gapic/v1/api.rst new file mode 100644 index 000000000000..9d987a96747a --- /dev/null +++ b/packages/google-cloud-datastore/docs/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Client for Google Cloud Datastore API +===================================== + +.. automodule:: google.cloud.datastore_v1 + :members: + :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/gapic/v1/types.rst b/packages/google-cloud-datastore/docs/gapic/v1/types.rst new file mode 100644 index 000000000000..840085452eb4 --- /dev/null +++ b/packages/google-cloud-datastore/docs/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Datastore API Client +=========================================== + +.. 
automodule:: google.cloud.datastore_v1.types + :members: \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst new file mode 100644 index 000000000000..08466ac45a67 --- /dev/null +++ b/packages/google-cloud-datastore/docs/index.rst @@ -0,0 +1,84 @@ +Python Client for Google Cloud Datastore API (`Beta`_) +====================================================== + +`Google Cloud Datastore API`_: Accesses the schemaless NoSQL database to provide fully managed, robust, +scalable storage for your application. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst +.. _Google Cloud Datastore API: https://cloud.google.com/datastore +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/usage.html +.. _Product Documentation: https://cloud.google.com/datastore + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud Datastore API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-cloud-datastore + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-cloud-datastore + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Cloud Datastore + API to see other available methods on the client. +- Read the `Google Cloud Datastore API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Cloud Datastore API Product documentation: https://cloud.google.com/datastore +.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst + +API Reference ------------- ..
toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types \ No newline at end of file diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py new file mode 100644 index 000000000000..b56ba5e89c64 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py @@ -0,0 +1,52 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for making API requests via gapic / gRPC.""" + +from google.api_core.gapic_v1 import client_info +from google.cloud.datastore_v1.gapic import datastore_client +from grpc import insecure_channel +import six + +from google.cloud._helpers import make_secure_channel +from google.cloud._http import DEFAULT_USER_AGENT + +from google.cloud.datastore import __version__ + + +def make_datastore_api(client): + """Create an instance of the GAPIC Datastore API. + + :type client: :class:`~google.cloud.datastore.client.Client` + :param client: The client that holds configuration details. + + :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` + :returns: A datastore API instance with the proper credentials. + """ + parse_result = six.moves.urllib_parse.urlparse( + client._base_url) + host = parse_result.netloc + if parse_result.scheme == 'https': + channel = make_secure_channel( + client._credentials, DEFAULT_USER_AGENT, host) + else: + channel = insecure_channel(host) + + return datastore_client.DatastoreClient( + channel=channel, + client_info=client_info.ClientInfo( + client_library_version=__version__, + gapic_version=__version__, + ), + ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py b/packages/google-cloud-datastore/google/cloud/datastore/_gax.py deleted file mode 100644 index 4eb54eb7681d..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gax.py +++ /dev/null @@ -1,240 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
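For context: the channel selection in the new `_gapic.make_datastore_api` above (and, unchanged, in the `_gax` module being deleted below) hinges only on the scheme of the client's base URL. A minimal runnable sketch of just that decision, assuming only `six`; `choose_channel_kind` is an illustrative name of ours, and the actual channel construction (`make_secure_channel` / `grpc.insecure_channel`) is deliberately elided:

```python
# Sketch (not the library's API) of the scheme-based channel choice in
# make_datastore_api: parse the client's base URL and branch on scheme.
import six


def choose_channel_kind(base_url):
    parse_result = six.moves.urllib_parse.urlparse(base_url)
    host = parse_result.netloc
    # https -> secure channel carrying the client's credentials; anything
    # else (e.g. a local emulator configured via GCD_HOST) -> insecure.
    kind = 'secure' if parse_result.scheme == 'https' else 'insecure'
    return kind, host


print(choose_channel_kind('https://datastore.googleapis.com'))  # ('secure', 'datastore.googleapis.com')
print(choose_channel_kind('http://localhost:8081'))  # ('insecure', 'localhost:8081')
```

The insecure branch is what lets the same client object talk to a local Datastore emulator without TLS credentials.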
- -"""Helpers for making API requests via GAX / gRPC.""" - - -import contextlib -import sys - -from google.api_core.gapic_v1 import client_info -from google.cloud.datastore_v1.gapic import datastore_client -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.gax.utils import metrics -from grpc import insecure_channel -from grpc import StatusCode -import six - -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud import exceptions - -from google.cloud.datastore import __version__ - - -_METRICS_HEADERS = ( - ('gccl', __version__), -) -_HEADER_STR = metrics.stringify(metrics.fill(_METRICS_HEADERS)) -_GRPC_EXTRA_OPTIONS = ( - ('x-goog-api-client', _HEADER_STR), -) -_GRPC_ERROR_MAPPING = { - StatusCode.UNKNOWN: exceptions.InternalServerError, - StatusCode.INVALID_ARGUMENT: exceptions.BadRequest, - StatusCode.DEADLINE_EXCEEDED: exceptions.GatewayTimeout, - StatusCode.NOT_FOUND: exceptions.NotFound, - StatusCode.ALREADY_EXISTS: exceptions.Conflict, - StatusCode.PERMISSION_DENIED: exceptions.Forbidden, - StatusCode.UNAUTHENTICATED: exceptions.Unauthorized, - StatusCode.RESOURCE_EXHAUSTED: exceptions.TooManyRequests, - StatusCode.FAILED_PRECONDITION: exceptions.PreconditionFailed, - StatusCode.ABORTED: exceptions.Conflict, - StatusCode.OUT_OF_RANGE: exceptions.BadRequest, - StatusCode.UNIMPLEMENTED: exceptions.MethodNotImplemented, - StatusCode.INTERNAL: exceptions.InternalServerError, - StatusCode.UNAVAILABLE: exceptions.ServiceUnavailable, - StatusCode.DATA_LOSS: exceptions.InternalServerError, -} - - -@contextlib.contextmanager -def _catch_remap_gax_error(): - """Remap GAX exceptions that happen in context. - - .. _code.proto: https://github.com/googleapis/googleapis/blob/\ - master/google/rpc/code.proto - - Remaps gRPC exceptions to the classes defined in - :mod:`~google.cloud.exceptions` (according to the description - in `code.proto`_). - """ - try: - yield - except GaxError as exc: - error_code = exc_to_code(exc.cause) - error_class = _GRPC_ERROR_MAPPING.get(error_code) - if error_class is None: - raise - else: - new_exc = error_class(exc.cause.details()) - six.reraise(error_class, new_exc, sys.exc_info()[2]) - - -class GAPICDatastoreAPI(datastore_client.DatastoreClient): - """An API object that sends proto-over-gRPC requests. - - A light wrapper around the parent class, with exception re-mapping - provided (from GaxError to our native errors). - - :type args: tuple - :param args: Positional arguments to pass to constructor. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to constructor. - """ - - def lookup(self, *args, **kwargs): - """Perform a ``lookup`` request. - - A light wrapper around the the base method from the parent class. - Intended to provide exception re-mapping (from GaxError to our - native errors). - - :type args: tuple - :param args: Positional arguments to pass to base method. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to base method. - - :rtype: :class:`.datastore_pb2.LookupResponse` - :returns: The returned protobuf response object. - """ - with _catch_remap_gax_error(): - return super(GAPICDatastoreAPI, self).lookup(*args, **kwargs) - - def run_query(self, *args, **kwargs): - """Perform a ``runQuery`` request. - - A light wrapper around the the base method from the parent class. - Intended to provide exception re-mapping (from GaxError to our - native errors). 
- - :type args: tuple - :param args: Positional arguments to pass to base method. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to base method. - - :rtype: :class:`.datastore_pb2.RunQueryResponse` - :returns: The returned protobuf response object. - """ - with _catch_remap_gax_error(): - return super(GAPICDatastoreAPI, self).run_query(*args, **kwargs) - - def begin_transaction(self, *args, **kwargs): - """Perform a ``beginTransaction`` request. - - A light wrapper around the the base method from the parent class. - Intended to provide exception re-mapping (from GaxError to our - native errors). - - :type args: tuple - :param args: Positional arguments to pass to base method. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to base method. - - :rtype: :class:`.datastore_pb2.BeginTransactionResponse` - :returns: The returned protobuf response object. - """ - with _catch_remap_gax_error(): - return super(GAPICDatastoreAPI, self).begin_transaction( - *args, **kwargs) - - def commit(self, *args, **kwargs): - """Perform a ``commit`` request. - - A light wrapper around the the base method from the parent class. - Intended to provide exception re-mapping (from GaxError to our - native errors). - - :type args: tuple - :param args: Positional arguments to pass to base method. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to base method. - - :rtype: :class:`.datastore_pb2.CommitResponse` - :returns: The returned protobuf response object. - """ - with _catch_remap_gax_error(): - return super(GAPICDatastoreAPI, self).commit(*args, **kwargs) - - def rollback(self, *args, **kwargs): - """Perform a ``rollback`` request. - - A light wrapper around the the base method from the parent class. - Intended to provide exception re-mapping (from GaxError to our - native errors). - - :type args: tuple - :param args: Positional arguments to pass to base method. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to base method. - - :rtype: :class:`.datastore_pb2.RollbackResponse` - :returns: The returned protobuf response object. - """ - with _catch_remap_gax_error(): - return super(GAPICDatastoreAPI, self).rollback(*args, **kwargs) - - def allocate_ids(self, *args, **kwargs): - """Perform an ``allocateIds`` request. - - A light wrapper around the the base method from the parent class. - Intended to provide exception re-mapping (from GaxError to our - native errors). - - :type args: tuple - :param args: Positional arguments to pass to base method. - - :type kwargs: dict - :param kwargs: Keyword arguments to pass to base method. - - :rtype: :class:`.datastore_pb2.AllocateIdsResponse` - :returns: The returned protobuf response object. - """ - with _catch_remap_gax_error(): - return super(GAPICDatastoreAPI, self).allocate_ids( - *args, **kwargs) - - -def make_datastore_api(client): - """Create an instance of the GAPIC Datastore API. - - :type client: :class:`~google.cloud.datastore.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` - :returns: A datastore API instance with the proper credentials. 
- """ - parse_result = six.moves.urllib_parse.urlparse( - client._base_url) - host = parse_result.netloc - if parse_result.scheme == 'https': - channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, host) - else: - channel = insecure_channel(host) - - return GAPICDatastoreAPI( - channel=channel, - client_info=client_info.ClientInfo( - client_library_version=__version__, - gapic_version=__version__, - ), - ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index b53e2deb54f3..ac93f8480d6f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -31,7 +31,7 @@ from google.cloud.environment_vars import GCD_HOST try: - from google.cloud.datastore._gax import make_datastore_api + from google.cloud.datastore._gapic import make_datastore_api _HAVE_GRPC = True except ImportError: # pragma: NO COVER make_datastore_api = None @@ -86,7 +86,7 @@ def _extended_lookup(datastore_api, project, key_pbs, :type datastore_api: :class:`google.cloud.datastore._http.HTTPDatastoreAPI` - or :class:`google.cloud.datastore._gax.GAPICDatastoreAPI` + or :class:`google.cloud.datastore_v1.gapic.DatastoreClient` :param datastore_api: The datastore API object used to connect to datastore. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 5157e60d4e80..5297ae3121d7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -1,10 +1,10 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -27,4 +27,5 @@ class DatastoreClient(datastore_client.DatastoreClient): __all__ = ( 'enums', 'types', - 'DatastoreClient', ) + 'DatastoreClient', +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 983e30dce902..e747909477cc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -1,25 +1,16 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/datastore/v1/datastore.proto, -# and updates to that file get reflected here through a refresh process. -# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. """Accesses the google.datastore.v1 Datastore API.""" import pkg_resources @@ -57,11 +48,12 @@ class DatastoreClient(object): # this service _DEFAULT_SCOPES = ( 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', ) + 'https://www.googleapis.com/auth/datastore', + ) # The name of the interface for this client. This is the key used to find - # method configuration in the client_config dictionary - _INTERFACE_NAME = ('google.datastore.v1.Datastore') + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.datastore.v1.Datastore' def __init__(self, channel=None, @@ -72,81 +64,97 @@ def __init__(self, Args: channel (grpc.Channel): A ``Channel`` instance through - which to make calls. If specified, then the ``credentials`` - argument is ignored. + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): - A dictionary of call options for each method. If not specified - the default configuration is used. Generally, you only need - to set this if you're developing your own client library. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'channel and credentials arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__)) + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + # Create the channel. if channel is None: channel = google.api_core.grpc_helpers.create_channel( self.SERVICE_ADDRESS, credentials=credentials, - scopes=self._DEFAULT_SCOPES) + scopes=self._DEFAULT_SCOPES, + ) + # Create the gRPC stubs. self.datastore_stub = (datastore_pb2.DatastoreStub(channel)) if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - interface_config = client_config['interfaces'][self._INTERFACE_NAME] + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) 
method_configs = google.api_core.gapic_v1.config.parse_method_configs( - interface_config) + client_config['interfaces'][self._INTERFACE_NAME], ) + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. self._lookup = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.Lookup, default_retry=method_configs['Lookup'].retry, default_timeout=method_configs['Lookup'].timeout, - client_info=client_info) + client_info=client_info, + ) self._run_query = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.RunQuery, default_retry=method_configs['RunQuery'].retry, default_timeout=method_configs['RunQuery'].timeout, - client_info=client_info) + client_info=client_info, + ) self._begin_transaction = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.BeginTransaction, default_retry=method_configs['BeginTransaction'].retry, default_timeout=method_configs['BeginTransaction'].timeout, - client_info=client_info) + client_info=client_info, + ) self._commit = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.Commit, default_retry=method_configs['Commit'].retry, default_timeout=method_configs['Commit'].timeout, - client_info=client_info) + client_info=client_info, + ) self._rollback = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.Rollback, default_retry=method_configs['Rollback'].retry, default_timeout=method_configs['Rollback'].timeout, - client_info=client_info) + client_info=client_info, + ) self._allocate_ids = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.AllocateIds, default_retry=method_configs['AllocateIds'].retry, default_timeout=method_configs['AllocateIds'].timeout, - client_info=client_info) + client_info=client_info, + ) self._reserve_ids = google.api_core.gapic_v1.method.wrap_method( self.datastore_stub.ReserveIds, default_retry=method_configs['ReserveIds'].retry, default_timeout=method_configs['ReserveIds'].timeout, - client_info=client_info) + client_info=client_info, + ) # Service calls def lookup(self, @@ -154,7 +162,8 @@ def lookup(self, keys, read_options=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Looks up entities by key. @@ -182,6 +191,8 @@ def lookup(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.LookupResponse` instance. @@ -193,9 +204,16 @@ def lookup(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" + if metadata is None: + metadata = [] + metadata = list(metadata) request = datastore_pb2.LookupRequest( - project_id=project_id, keys=keys, read_options=read_options) - return self._lookup(request, retry=retry, timeout=timeout) + project_id=project_id, + keys=keys, + read_options=read_options, + ) + return self._lookup( + request, retry=retry, timeout=timeout, metadata=metadata) def run_query(self, project_id, @@ -204,7 +222,8 @@ def run_query(self, query=None, gql_query=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Queries for entities. @@ -241,6 +260,8 @@ def run_query(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.RunQueryResponse` instance. @@ -252,25 +273,32 @@ def run_query(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + if metadata is None: + metadata = [] + metadata = list(metadata) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( query=query, - gql_query=gql_query, ) + gql_query=gql_query, + ) request = datastore_pb2.RunQueryRequest( project_id=project_id, partition_id=partition_id, read_options=read_options, query=query, - gql_query=gql_query) - return self._run_query(request, retry=retry, timeout=timeout) + gql_query=gql_query, + ) + return self._run_query( + request, retry=retry, timeout=timeout, metadata=metadata) def begin_transaction(self, project_id, transaction_options=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Begins a new transaction. @@ -294,6 +322,8 @@ def begin_transaction(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.BeginTransactionResponse` instance. @@ -305,9 +335,15 @@ def begin_transaction(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + if metadata is None: + metadata = [] + metadata = list(metadata) request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, transaction_options=transaction_options) - return self._begin_transaction(request, retry=retry, timeout=timeout) + project_id=project_id, + transaction_options=transaction_options, + ) + return self._begin_transaction( + request, retry=retry, timeout=timeout, metadata=metadata) def commit(self, project_id, @@ -315,7 +351,8 @@ def commit(self, mutations, transaction=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Commits a transaction, optionally creating, deleting or modifying some entities. 
@@ -359,6 +396,8 @@ def commit(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.CommitResponse` instance. @@ -370,6 +409,9 @@ def commit(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + if metadata is None: + metadata = [] + metadata = list(metadata) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) @@ -378,14 +420,17 @@ def commit(self, project_id=project_id, mode=mode, mutations=mutations, - transaction=transaction) - return self._commit(request, retry=retry, timeout=timeout) + transaction=transaction, + ) + return self._commit( + request, retry=retry, timeout=timeout, metadata=metadata) def rollback(self, project_id, transaction, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Rolls back a transaction. @@ -409,6 +454,8 @@ def rollback(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.RollbackResponse` instance. @@ -420,15 +467,22 @@ def rollback(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + if metadata is None: + metadata = [] + metadata = list(metadata) request = datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction) - return self._rollback(request, retry=retry, timeout=timeout) + project_id=project_id, + transaction=transaction, + ) + return self._rollback( + request, retry=retry, timeout=timeout, metadata=metadata) def allocate_ids(self, project_id, keys, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -455,6 +509,8 @@ def allocate_ids(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.AllocateIdsResponse` instance. @@ -466,16 +522,23 @@ def allocate_ids(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" + if metadata is None: + metadata = [] + metadata = list(metadata) request = datastore_pb2.AllocateIdsRequest( - project_id=project_id, keys=keys) - return self._allocate_ids(request, retry=retry, timeout=timeout) + project_id=project_id, + keys=keys, + ) + return self._allocate_ids( + request, retry=retry, timeout=timeout, metadata=metadata) def reserve_ids(self, project_id, keys, database_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -503,6 +566,8 @@ def reserve_ids(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. @@ -514,6 +579,13 @@ def reserve_ids(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + if metadata is None: + metadata = [] + metadata = list(metadata) request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, keys=keys, database_id=database_id) - return self._reserve_ids(request, retry=retry, timeout=timeout) + project_id=project_id, + keys=keys, + database_id=database_id, + ) + return self._reserve_ids( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 31dc31d8e92f..893c098c92dc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -1,10 +1,10 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index 688ff0bcfa4d..316bb6cce1eb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -22,9 +22,10 @@ name='google/cloud/datastore_v1/proto/datastore.proto', package='google.datastore.v1', syntax='proto3', - serialized_pb=_b('\n/google/cloud/datastore_v1/proto/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/datastore_v1/proto/entity.proto\x1a+google/cloud/datastore_v1/proto/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"s\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x44\n\x13transaction_options\x18\n \x01(\x0b\x32\'.google.datastore.v1.TransactionOptions\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"d\n\x11ReserveIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0b\x64\x61tabase_id\x18\t \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x14\n\x12ReserveIdsResponse\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 
\x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type\"\xe3\x01\n\x12TransactionOptions\x12G\n\nread_write\x18\x01 \x01(\x0b\x32\x31.google.datastore.v1.TransactionOptions.ReadWriteH\x00\x12\x45\n\tread_only\x18\x02 \x01(\x0b\x32\x30.google.datastore.v1.TransactionOptions.ReadOnlyH\x00\x1a)\n\tReadWrite\x12\x1c\n\x14previous_transaction\x18\x01 \x01(\x0c\x1a\n\n\x08ReadOnlyB\x06\n\x04mode2\xec\x07\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*\x12\x8e\x01\n\nReserveIds\x12&.google.datastore.v1.ReserveIdsRequest\x1a\'.google.datastore.v1.ReserveIdsResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/projects/{project_id}:reserveIds:\x01*B\x85\x01\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 
\x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x81\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 
\x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z= 0.28.0, < 0.29dev', - 'google-api-core >= 0.1.1, < 0.2.0dev', - 'google-auth >= 1.0.2, < 2.0dev', - 'google-gax >= 0.15.15, < 0.16dev', - 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', - 'requests >= 2.18.4, < 3.0dev', + 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', ] setup( diff --git a/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py b/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py new file mode 100644 index 000000000000..0ac8a955480a --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py @@ -0,0 +1,305 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unit tests.""" + +import pytest + +from google.cloud import datastore_v1 +from google.cloud.datastore_v1 import enums +from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestDatastoreClient(object): + def test_lookup(self): + # Setup Expected Response + expected_response = {} + expected_response = datastore_pb2.LookupResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup Request + project_id = 'projectId-1969970175' + keys = [] + + response = client.lookup(project_id, keys) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.LookupRequest( + project_id=project_id, keys=keys) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_lookup_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + keys = [] + + with pytest.raises(CustomException): + client.lookup(project_id, keys) + + def test_run_query(self): + # Setup Expected Response + expected_response = {} + expected_response = datastore_pb2.RunQueryResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup Request + project_id = 'projectId-1969970175' + partition_id = {} + + response = client.run_query(project_id, partition_id) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.RunQueryRequest( + project_id=project_id, partition_id=partition_id) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_run_query_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + partition_id = {} + + with pytest.raises(CustomException): + client.run_query(project_id, partition_id) + + def test_begin_transaction(self): + # Setup Expected Response + transaction = b'-34' + expected_response = {'transaction': transaction} + expected_response = datastore_pb2.BeginTransactionResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup 
Request + project_id = 'projectId-1969970175' + + response = client.begin_transaction(project_id) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.BeginTransactionRequest( + project_id=project_id) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_begin_transaction_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + + with pytest.raises(CustomException): + client.begin_transaction(project_id) + + def test_commit(self): + # Setup Expected Response + index_updates = 1425228195 + expected_response = {'index_updates': index_updates} + expected_response = datastore_pb2.CommitResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup Request + project_id = 'projectId-1969970175' + mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED + mutations = [] + + response = client.commit(project_id, mode, mutations) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.CommitRequest( + project_id=project_id, mode=mode, mutations=mutations) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_commit_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED + mutations = [] + + with pytest.raises(CustomException): + client.commit(project_id, mode, mutations) + + def test_rollback(self): + # Setup Expected Response + expected_response = {} + expected_response = datastore_pb2.RollbackResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup Request + project_id = 'projectId-1969970175' + transaction = b'-34' + + response = client.rollback(project_id, transaction) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.RollbackRequest( + project_id=project_id, transaction=transaction) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_rollback_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + transaction = b'-34' + + with pytest.raises(CustomException): + client.rollback(project_id, transaction) + + def test_allocate_ids(self): + # Setup Expected Response + expected_response = {} + expected_response = datastore_pb2.AllocateIdsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup Request + project_id = 'projectId-1969970175' + keys = [] + + response = client.allocate_ids(project_id, keys) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.AllocateIdsRequest( + project_id=project_id, keys=keys) + actual_request = channel.requests[0][1] + assert 
expected_request == actual_request + + def test_allocate_ids_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + keys = [] + + with pytest.raises(CustomException): + client.allocate_ids(project_id, keys) + + def test_reserve_ids(self): + # Setup Expected Response + expected_response = {} + expected_response = datastore_pb2.ReserveIdsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup Request + project_id = 'projectId-1969970175' + keys = [] + + response = client.reserve_ids(project_id, keys) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datastore_pb2.ReserveIdsRequest( + project_id=project_id, keys=keys) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_reserve_ids_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = datastore_v1.DatastoreClient(channel=channel) + + # Setup request + project_id = 'projectId-1969970175' + keys = [] + + with pytest.raises(CustomException): + client.reserve_ids(project_id, keys) diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py new file mode 100644 index 000000000000..f971eef7c4d9 --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py @@ -0,0 +1,75 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
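The generated tests above all follow one recipe: seed the stub channel with a canned response, invoke the client method, then assert against the request the channel recorded. A minimal standalone version of that recipe, assuming only that the `datastore_v1` GAPIC layer introduced in this patch series is importable; the `RecordingChannel` name is illustrative, not part of the library:

# Sketch: exercising DatastoreClient without a network, mirroring the
# ChannelStub defined in the tests above.
from google.cloud import datastore_v1
from google.cloud.datastore_v1.proto import datastore_pb2


class RecordingChannel(object):
    """Bare-bones grpc.Channel stand-in that records every request."""

    def __init__(self, responses):
        self.responses = list(responses)
        self.requests = []

    def unary_unary(self, method,
                    request_serializer=None, response_deserializer=None):
        def call(request, timeout=None, metadata=None, credentials=None):
            # Record the call, then replay the next canned response.
            self.requests.append((method, request))
            return self.responses.pop(0)
        return call


channel = RecordingChannel([datastore_pb2.LookupResponse()])
client = datastore_v1.DatastoreClient(channel=channel)
client.lookup('my-project', keys=[])
assert len(channel.requests) == 1  # exactly one RPC was recorded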
+ +import unittest + +import mock + +from google.cloud.datastore.client import _HAVE_GRPC + + +@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') +class Test_make_datastore_api(unittest.TestCase): + + def _call_fut(self, client): + from google.cloud.datastore._gapic import make_datastore_api + + return make_datastore_api(client) + + @mock.patch( + 'google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient', + return_value=mock.sentinel.ds_client) + @mock.patch('google.cloud.datastore._gapic.make_secure_channel', + return_value=mock.sentinel.channel) + def test_live_api(self, make_chan, mock_klass): + from google.cloud._http import DEFAULT_USER_AGENT + + base_url = 'https://datastore.googleapis.com:443' + client = mock.Mock( + _base_url=base_url, + _credentials=mock.sentinel.credentials, + spec=['_base_url', '_credentials']) + ds_api = self._call_fut(client) + self.assertIs(ds_api, mock.sentinel.ds_client) + + make_chan.assert_called_once_with( + mock.sentinel.credentials, + DEFAULT_USER_AGENT, + 'datastore.googleapis.com:443') + mock_klass.assert_called_once_with( + channel=mock.sentinel.channel, + client_info=mock.ANY, + ) + + @mock.patch( + 'google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient', + return_value=mock.sentinel.ds_client) + @mock.patch('google.cloud.datastore._gapic.insecure_channel', + return_value=mock.sentinel.channel) + def test_emulator(self, make_chan, mock_klass): + + host = 'localhost:8901' + base_url = 'http://' + host + client = mock.Mock( + _base_url=base_url, + _credentials=mock.sentinel.credentials, + spec=['_base_url', '_credentials']) + ds_api = self._call_fut(client) + self.assertIs(ds_api, mock.sentinel.ds_client) + + make_chan.assert_called_once_with(host) + mock_klass.assert_called_once_with( + channel=mock.sentinel.channel, + client_info=mock.ANY, + ) diff --git a/packages/google-cloud-datastore/tests/unit/test__gax.py b/packages/google-cloud-datastore/tests/unit/test__gax.py deleted file mode 100644 index 9f0896058fe3..000000000000 --- a/packages/google-cloud-datastore/tests/unit/test__gax.py +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
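The `test_live_api` / `test_emulator` pair above pins down the routing rule for the new `_gapic` module: an `https://` base URL yields a secure channel built from the client credentials, while an emulator-style `http://` URL yields a plain insecure channel. A condensed sketch of that branch, inferred from the test expectations rather than copied from `_gapic.py` itself:

# Inferred sketch only; the real module also wires the channel into
# DatastoreClient. 'make_secure_channel' and 'insecure_channel' stand in
# for the helpers patched in the tests above.
from six.moves.urllib.parse import urlparse


def pick_channel(client, make_secure_channel, insecure_channel, user_agent):
    parsed = urlparse(client._base_url)
    if parsed.scheme == 'https':
        # Live API: TLS channel carrying the client's credentials.
        return make_secure_channel(
            client._credentials, user_agent, parsed.netloc)
    # Emulator: plain-text channel, no credentials required.
    return insecure_channel(parsed.netloc)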
- -import unittest - -import mock - -from google.cloud.datastore.client import _HAVE_GRPC - - -@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') -class Test__catch_remap_gax_error(unittest.TestCase): - - def _call_fut(self): - from google.cloud.datastore._gax import _catch_remap_gax_error - - return _catch_remap_gax_error() - - @staticmethod - def _fake_method(exc, result=None): - if exc is None: - return result - else: - raise exc - - @staticmethod - def _make_rendezvous(status_code, details): - from grpc._channel import _RPCState - from google.cloud.exceptions import GrpcRendezvous - - exc_state = _RPCState((), None, None, status_code, details) - return GrpcRendezvous(exc_state, None, None, None) - - def test_success(self): - expected = object() - with self._call_fut(): - result = self._fake_method(None, expected) - self.assertIs(result, expected) - - def test_non_grpc_err(self): - exc = RuntimeError('Not a gRPC error') - with self.assertRaises(RuntimeError): - with self._call_fut(): - self._fake_method(exc) - - def test_gax_error(self): - from google.gax.errors import GaxError - from grpc import StatusCode - from google.cloud.exceptions import Forbidden - - # First, create low-level GrpcRendezvous exception. - details = 'Some error details.' - cause = self._make_rendezvous(StatusCode.PERMISSION_DENIED, details) - # Then put it into a high-level GaxError. - msg = 'GAX Error content.' - exc = GaxError(msg, cause=cause) - - with self.assertRaises(Forbidden): - with self._call_fut(): - self._fake_method(exc) - - def test_gax_error_not_mapped(self): - from google.gax.errors import GaxError - from grpc import StatusCode - - cause = self._make_rendezvous(StatusCode.CANCELLED, None) - exc = GaxError(None, cause=cause) - - with self.assertRaises(GaxError): - with self._call_fut(): - self._fake_method(exc) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') -class TestGAPICDatastoreAPI(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.datastore._gax import GAPICDatastoreAPI - - return GAPICDatastoreAPI - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_lookup(self): - from google.cloud.datastore_v1.gapic import datastore_client - - patch1 = mock.patch.object( - datastore_client.DatastoreClient, '__init__', - return_value=None) - patch2 = mock.patch.object(datastore_client.DatastoreClient, 'lookup') - patch3 = mock.patch( - 'google.cloud.datastore._gax._catch_remap_gax_error') - - with patch1 as mock_constructor: - ds_api = self._make_one() - mock_constructor.assert_called_once_with() - with patch2 as mock_lookup: - with patch3 as mock_catch_rendezvous: - mock_catch_rendezvous.assert_not_called() - ds_api.lookup(None, True, bb='cc') - mock_lookup.assert_called_once_with(None, True, bb='cc') - mock_catch_rendezvous.assert_called_once_with() - - def test_run_query(self): - from google.cloud.datastore_v1.gapic import datastore_client - - patch1 = mock.patch.object( - datastore_client.DatastoreClient, '__init__', - return_value=None) - patch2 = mock.patch.object( - datastore_client.DatastoreClient, 'run_query') - patch3 = mock.patch( - 'google.cloud.datastore._gax._catch_remap_gax_error') - - with patch1 as mock_constructor: - ds_api = self._make_one() - mock_constructor.assert_called_once_with() - with patch2 as mock_run_query: - with patch3 as mock_catch_rendezvous: - mock_catch_rendezvous.assert_not_called() - ds_api.run_query('47a', none=None) - mock_run_query.assert_called_once_with('47a', none=None) - 
mock_catch_rendezvous.assert_called_once_with() - - def test_begin_transaction(self): - from google.cloud.datastore_v1.gapic import datastore_client - - patch1 = mock.patch.object( - datastore_client.DatastoreClient, '__init__', - return_value=None) - patch2 = mock.patch.object( - datastore_client.DatastoreClient, 'begin_transaction') - patch3 = mock.patch( - 'google.cloud.datastore._gax._catch_remap_gax_error') - - with patch1 as mock_constructor: - ds_api = self._make_one() - mock_constructor.assert_called_once_with() - with patch2 as mock_begin_transaction: - with patch3 as mock_catch_rendezvous: - mock_catch_rendezvous.assert_not_called() - ds_api.begin_transaction('a', 'b', [], key='kei') - mock_begin_transaction.assert_called_once_with( - 'a', 'b', [], key='kei') - mock_catch_rendezvous.assert_called_once_with() - - def test_commit(self): - from google.cloud.datastore_v1.gapic import datastore_client - - patch1 = mock.patch.object( - datastore_client.DatastoreClient, '__init__', - return_value=None) - patch2 = mock.patch.object(datastore_client.DatastoreClient, 'commit') - patch3 = mock.patch( - 'google.cloud.datastore._gax._catch_remap_gax_error') - - with patch1 as mock_constructor: - ds_api = self._make_one() - mock_constructor.assert_called_once_with() - with patch2 as mock_commit: - with patch3 as mock_catch_rendezvous: - mock_catch_rendezvous.assert_not_called() - ds_api.commit(1, 2, a=3) - mock_commit.assert_called_once_with(1, 2, a=3) - mock_catch_rendezvous.assert_called_once_with() - - def test_rollback(self): - from google.cloud.datastore_v1.gapic import datastore_client - - patch1 = mock.patch.object( - datastore_client.DatastoreClient, '__init__', - return_value=None) - patch2 = mock.patch.object( - datastore_client.DatastoreClient, 'rollback') - patch3 = mock.patch( - 'google.cloud.datastore._gax._catch_remap_gax_error') - - with patch1 as mock_constructor: - ds_api = self._make_one() - mock_constructor.assert_called_once_with() - with patch2 as mock_rollback: - with patch3 as mock_catch_rendezvous: - mock_catch_rendezvous.assert_not_called() - ds_api.rollback(11, 12, arp='marp') - mock_rollback.assert_called_once_with(11, 12, arp='marp') - mock_catch_rendezvous.assert_called_once_with() - - def test_allocate_ids(self): - from google.cloud.datastore_v1.gapic import datastore_client - - patch1 = mock.patch.object( - datastore_client.DatastoreClient, '__init__', - return_value=None) - patch2 = mock.patch.object( - datastore_client.DatastoreClient, 'allocate_ids') - patch3 = mock.patch( - 'google.cloud.datastore._gax._catch_remap_gax_error') - - with patch1 as mock_constructor: - ds_api = self._make_one() - mock_constructor.assert_called_once_with() - with patch2 as mock_allocate_ids: - with patch3 as mock_catch_rendezvous: - mock_catch_rendezvous.assert_not_called() - ds_api.allocate_ids( - 'hey', 'bai', bye=(47, 4), shy={'a': 4}) - mock_allocate_ids.assert_called_once_with( - 'hey', 'bai', bye=(47, 4), shy={'a': 4}) - mock_catch_rendezvous.assert_called_once_with() - - -@unittest.skipUnless(_HAVE_GRPC, 'No gRPC') -class Test_make_datastore_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.datastore._gax import make_datastore_api - - return make_datastore_api(client) - - @mock.patch( - 'google.cloud.datastore._gax.GAPICDatastoreAPI', - return_value=mock.sentinel.ds_client) - @mock.patch('google.cloud.datastore._gax.make_secure_channel', - return_value=mock.sentinel.channel) - def test_live_api(self, make_chan, mock_klass): - from 
google.cloud.datastore_v1.gapic import datastore_client
-        from google.cloud._http import DEFAULT_USER_AGENT
-        from google.cloud.datastore import __version__
-
-        host = datastore_client.DatastoreClient.SERVICE_ADDRESS
-        base_url = 'https://' + host
-        client = mock.Mock(
-            _base_url=base_url,
-            _credentials=mock.sentinel.credentials,
-            spec=['_base_url', '_credentials'])
-        ds_api = self._call_fut(client)
-        self.assertIs(ds_api, mock.sentinel.ds_client)
-
-        make_chan.assert_called_once_with(
-            mock.sentinel.credentials, DEFAULT_USER_AGENT, host)
-        mock_klass.assert_called_once_with(
-            channel=mock.sentinel.channel,
-            client_info=mock.ANY,
-        )
-
-    @mock.patch(
-        'google.cloud.datastore._gax.GAPICDatastoreAPI',
-        return_value=mock.sentinel.ds_client)
-    @mock.patch('google.cloud.datastore._gax.insecure_channel',
-                return_value=mock.sentinel.channel)
-    def test_emulator(self, make_chan, mock_klass):
-        from google.cloud.datastore import __version__
-
-        host = 'localhost:8901'
-        base_url = 'http://' + host
-        client = mock.Mock(
-            _base_url=base_url,
-            _credentials=mock.sentinel.credentials,
-            spec=['_base_url', '_credentials'])
-        ds_api = self._call_fut(client)
-        self.assertIs(ds_api, mock.sentinel.ds_client)
-
-        make_chan.assert_called_once_with(host)
-        mock_klass.assert_called_once_with(
-            channel=mock.sentinel.channel,
-            client_info=mock.ANY,
-        )
diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py
index 949753e75f3b..e43a61022ab6 100644
--- a/packages/google-cloud-datastore/tests/unit/test_client.py
+++ b/packages/google-cloud-datastore/tests/unit/test_client.py
@@ -232,7 +232,7 @@ def test_constructor_gcd_host(self):
             project=project, credentials=creds, _http=http)
         self.assertEqual(client._base_url, 'http://' + host)
 
-    def test__datastore_api_property_gax(self):
+    def test__datastore_api_property_gapic(self):
         client = self._make_one(
             project='prahj-ekt', credentials=_make_credentials(),
             _http=object(), _use_grpc=True)

From d8311b055e17e2c1c8198faa6479257e9c46597b Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Wed, 14 Feb 2018 14:51:00 -0800
Subject: [PATCH 161/611] Release datastore-1.5.0 (#4883)

---
 packages/google-cloud-datastore/CHANGELOG.md | 27 ++++++++++++++++++++
 packages/google-cloud-datastore/setup.py     |  2 +-
 2 files changed, 28 insertions(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md
index 08c6d71efd4a..f75f5f358292 100644
--- a/packages/google-cloud-datastore/CHANGELOG.md
+++ b/packages/google-cloud-datastore/CHANGELOG.md
@@ -4,6 +4,33 @@
 
 [1]: https://pypi.org/project/google-cloud-datastore/#history
 
+## 1.5.0
+
+### Interface additions
+
+- Added `Entity.id` property (#4640)
+- Added optional `location_prefix` kwarg in `to_legacy_urlsafe` (#4635)
+- Added support for transaction options (#4357)
+- Added the ability to specify read consistency (#4343, #4376)
+
+### Implementation changes
+
+- The underlying autogenerated code was regenerated to pick up new features and bugfixes. (#4348, #4877)
+- Updated the HTTP implementation to match the gRPC implementation. (#4388)
+- Set `next_page_token` to `None` if there are no more results (#4349)
+
+### Documentation
+
+- Entity doc consistency (#4641)
+- Fixing "Fore" -> "For" typo in README docs. (#4317)
+
+### Testing
+
+- Update datastore doctests to reflect change in cursor behavior. (#4382)
+- Making a `nox -s default` session for all packages.
(#4324) +- Shorten test names (#4321) + + ## 1.4.0 ### Interface changes / additions diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 957553f8a27c..1b95f6a5a121 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -57,7 +57,7 @@ setup( name='google-cloud-datastore', - version='1.4.1.dev1', + version='1.5.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ From 3cff86a4818121ac9c571cc00c18e0b2ef1ff89f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 20 Feb 2018 17:08:12 -0500 Subject: [PATCH 162/611] Pass '*session.posargs' through on command line for system tests. (#4904) --- packages/google-cloud-datastore/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 9faac3e7115a..311a47f8729f 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -89,7 +89,7 @@ def system(session, py): session.install('.') # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/system') + session.run('py.test', '--quiet', 'tests/system', *session.posargs) @nox.session From 44f59ad1d97b5dd4b55a0e710e0ad9778f6d4a8d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 21 Feb 2018 16:16:23 -0500 Subject: [PATCH 163/611] Exercise datastore query result paging (#4905) Add system test which attempts to reproduce #4264. --- .../tests/system/test_system.py | 17 ++++++++++++ .../tests/system/utils/clear_datastore.py | 1 + .../tests/system/utils/populate_datastore.py | 27 +++++++++++++++++++ 3 files changed, 45 insertions(+) diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 3ab7295f50c4..7ac3f2f36c1a 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -364,6 +364,23 @@ def test_projection_query(self): sansa_dict = dict(sansa_entity) self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'}) + def test_query_paginate_simple(self): + + # See issue #4264 + page_query = self.CLIENT.query(kind='uuid_key') + iterator = page_query.fetch() + + seen = set() + page_count = 0 + for page in iterator.pages: + page_count += 1 + for entity in page: + uuid_str = entity.key.name + self.assertNotIn(uuid_str, seen, uuid_str) + seen.add(uuid_str) + + self.assertTrue(page_count > 1) + def test_query_paginate_with_offset(self): page_query = self._base_query() page_query.order = 'appearances' diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index fb8d50b2ade6..3a09bff19898 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -30,6 +30,7 @@ 'Kind', 'Person', 'Post', + 'uuid_key', ) TRANSACTION_MAX_GROUPS = 5 diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 27a31caf1337..29223fc9c5b6 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -103,5 +103,32 @@ def add_characters(client=None): 
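The paging test above leans on the iterator's `pages` attribute, which issues one RPC per page and updates `next_page_token` as it goes. For reference, the same public pattern outside the test harness, assuming the `uuid_key` kind has been populated as in the utility below:

from google.cloud import datastore

client = datastore.Client()
query = client.query(kind='uuid_key')
iterator = query.fetch()

for page in iterator.pages:        # one backend call per page
    for entity in page:
        print(entity.key.name)     # the uuid4 string used as the key name
    print('cursor so far: {}'.format(iterator.next_page_token))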
character['family'])) +def add_uid_keys(): + with self.CLIENT.batch() as batch: + for index in range(1000): + uid = str(uuid.uuid4()) + key = self.CLIENT.key('uuid_key', uid) + entity = datastore.Entity(key=key) + batch.put(entity) + + +def add_uid_keys(): + num_batches = 2 + batch_size = 500 + + keys = [] + for batch_num in range(num_batches): + with self.CLIENT.batch() as batch: + for seq_no in range(batch_size): + uid = str(uuid.uuid4()) + key = self.CLIENT.key('uuid_key', uid) + keys.append(key) + entity = datastore.Entity(key=key) + entity['batch_num'] = batch_num + entity['seq_no'] = seq_no + batch.put(entity) + + if __name__ == '__main__': add_characters() + add_uid_keys() From 28f77b680e39d58bfaed4a00b52aab8a973d6a73 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 22 Feb 2018 10:28:50 -0800 Subject: [PATCH 164/611] Normalize all setup.py files (#4909) --- packages/google-cloud-datastore/setup.py | 99 ++++++++++++++---------- 1 file changed, 59 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 1b95f6a5a121..e6a65339b790 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,59 +12,78 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io import os -from setuptools import find_packages -from setuptools import setup +import setuptools -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) +# Package metadata. + +name = 'google-cloud-datastore' +description = 'Google Cloud Datastore API client library' +version = '1.5.0' +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Stable' +release_status = 'Development Status :: 5 - Production/Stable' +dependencies = [ + 'google-cloud-core<0.29dev,>=0.28.0', + 'google-api-core[grpc]<0.2.0dev,>=0.1.1', +] +extras = { +} + + +# Setup boilerplate below this line. + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. +packages = [ + package for package in setuptools.find_packages() + if package.startswith('google')] + +# Determine which namespaces are needed. +namespaces = ['google'] +if 'google.cloud' in packages: + namespaces.append('google.cloud') -with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: - README = file_obj.read() -# NOTE: This is duplicated throughout and we should try to -# consolidate. 
-SETUP_BASE = { - 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', - 'scripts': [], - 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', - 'license': 'Apache 2.0', - 'platforms': 'Posix; MacOS X; Windows', - 'include_package_data': True, - 'zip_safe': False, - 'classifiers': [ - 'Development Status :: 5 - Production/Stable', +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author='Google LLC', + author_email='googleapis-packages@google.com', + license='Apache 2.0', + url='https://github.com/GoogleCloudPlatform/google-cloud-python', + classifiers=[ + release_status, 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', + 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Operating System :: OS Independent', 'Topic :: Internet', ], -} - - -REQUIREMENTS = [ - 'google-cloud-core >= 0.28.0, < 0.29dev', - 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', -] - -setup( - name='google-cloud-datastore', - version='1.5.0', - description='Python Client for Google Cloud Datastore', - long_description=README, - namespace_packages=[ - 'google', - 'google.cloud', - ], - packages=find_packages(exclude=('tests*',)), - install_requires=REQUIREMENTS, - **SETUP_BASE + platforms='Posix; MacOS X; Windows', + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, ) From a67525a20e6bc9884af2c73dcb19f6939e4b5739 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 22 Feb 2018 16:15:04 -0500 Subject: [PATCH 165/611] Don't check 'exclude_from_indexes' for empty lists. (#4915) Closes #3152. --- .../google/cloud/datastore/helpers.py | 2 +- .../tests/unit/test_helpers.py | 22 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index f3838668fe3d..964eb2a37204 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -134,7 +134,7 @@ def entity_from_protobuf(pb): # Check if ``value_pb`` was excluded from index. Lists need to be # special-cased and we require all ``exclude_from_indexes`` values # in a list agree. 
- if is_list: + if is_list and len(value) > 0: exclude_values = set(value_pb.exclude_from_indexes for value_pb in value_pb.array_value.values) if len(exclude_values) != 1: diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 3624665a2a05..91a693455b7d 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -191,6 +191,28 @@ def test_nested_entity_no_key(self): self.assertEqual(len(inside_entity), 1) self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE) + def test_index_mismatch_ignores_empty_list(self): + from google.cloud.datastore_v1.proto import entity_pb2 + + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + + array_val_pb = entity_pb2.Value( + array_value=entity_pb2.ArrayValue(values=[])) + + entity_pb = entity_pb2.Entity( + properties={ + 'baz': array_val_pb, + }, + ) + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb.key.path.add(kind=_KIND, id=_ID) + + entity = self._call_fut(entity_pb) + entity_dict = dict(entity) + self.assertEqual(entity_dict['baz'], []) + class Test_entity_to_protobuf(unittest.TestCase): From 75d1d75337af24934d66f7a87a25dff3d9f31faa Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Feb 2018 16:34:37 -0800 Subject: [PATCH 166/611] Re-enable lint for tests, remove usage of pylint (#4921) --- packages/google-cloud-datastore/.flake8 | 7 ++++--- packages/google-cloud-datastore/nox.py | 11 ++--------- .../google-cloud-datastore/tests/doctests.py | 2 -- .../tests/system/test_system.py | 3 +-- .../tests/system/utils/populate_datastore.py | 18 +++++++----------- .../tests/unit/test_client.py | 10 +++++----- .../tests/unit/test_entity.py | 1 - .../tests/unit/test_key.py | 3 ++- 8 files changed, 21 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 2feb7fefea2a..1f44a90f8195 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,8 +1,9 @@ [flake8] exclude = - # Datastore includes generated code in the manual layer; - # do not lint this. - google/cloud/datastore/_app_engine_key_pb2.py, + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py # Standard linting exemptions. __pycache__, diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 311a47f8729f..958830c70e56 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -124,16 +124,9 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) + session.install('flake8') session.install('.') - session.run('flake8', 'google/cloud/datastore') - session.run( - 'gcp-devrel-py-tools', 'run-pylint', - '--config', 'pylint.config.py', - '--library-filesets', 'google', - '--test-filesets', 'tests', - # Temporarily allow this to fail. 
- success_codes=range(0, 100)) + session.run('flake8', 'google', 'tests') @nox.session diff --git a/packages/google-cloud-datastore/tests/doctests.py b/packages/google-cloud-datastore/tests/doctests.py index 2f93bae29d54..4901d8de538f 100644 --- a/packages/google-cloud-datastore/tests/doctests.py +++ b/packages/google-cloud-datastore/tests/doctests.py @@ -19,8 +19,6 @@ import six -import sphinx - from google.cloud import datastore diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 7ac3f2f36c1a..4532cd3a9737 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -22,7 +22,6 @@ from google.cloud._helpers import UTC from google.cloud import datastore from google.cloud.datastore.helpers import GeoPoint -from google.cloud.datastore_v1 import types from google.cloud.environment_vars import GCD_DATASET from google.cloud.exceptions import Conflict @@ -366,7 +365,7 @@ def test_projection_query(self): def test_query_paginate_simple(self): - # See issue #4264 + # See issue #4264 page_query = self.CLIENT.query(kind='uuid_key') iterator = page_query.fetch() diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 29223fc9c5b6..636b9a4af0cd 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -18,6 +18,7 @@ from __future__ import print_function import os +import uuid import six @@ -103,25 +104,20 @@ def add_characters(client=None): character['family'])) -def add_uid_keys(): - with self.CLIENT.batch() as batch: - for index in range(1000): - uid = str(uuid.uuid4()) - key = self.CLIENT.key('uuid_key', uid) - entity = datastore.Entity(key=key) - batch.put(entity) - +def add_uid_keys(client): + if client is None: + # Get a client that uses the test dataset. 
+ client = datastore.Client() -def add_uid_keys(): num_batches = 2 batch_size = 500 keys = [] for batch_num in range(num_batches): - with self.CLIENT.batch() as batch: + with client.batch() as batch: for seq_no in range(batch_size): uid = str(uuid.uuid4()) - key = self.CLIENT.key('uuid_key', uid) + key = client.key('uuid_key', uid) keys.append(key) entity = datastore.Entity(key=key) entity['batch_num'] = batch_num diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index e43a61022ab6..d3c4ea34d240 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -935,15 +935,15 @@ def test_transaction_defaults(self): mock_klass.assert_called_once_with(client) def test_read_only_transaction_defaults(self): - from google.cloud.datastore.transaction import Transaction from google.cloud.datastore_v1.types import TransactionOptions creds = _make_credentials() client = self._make_one(credentials=creds) xact = client.transaction(read_only=True) - self.assertEqual(xact._options, - TransactionOptions( - read_only=TransactionOptions.ReadOnly() - ) + self.assertEqual( + xact._options, + TransactionOptions( + read_only=TransactionOptions.ReadOnly() + ) ) self.assertFalse(xact._options.HasField("read_write")) self.assertTrue(xact._options.HasField("read_only")) diff --git a/packages/google-cloud-datastore/tests/unit/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py index 6157f5075ea6..37faec861e02 100644 --- a/packages/google-cloud-datastore/tests/unit/test_entity.py +++ b/packages/google-cloud-datastore/tests/unit/test_entity.py @@ -200,7 +200,6 @@ def test_id(self): self.assertEqual(entity.id, _ID) def test_id_none(self): - from google.cloud.datastore.key import Key entity = self._make_one(key=None) self.assertEqual(entity.id, None) diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index ddd04d161967..db66be08e065 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -709,7 +709,8 @@ def test_two_pairs(self): kind2 = 'child' name2 = 'naem' - dict_path = [{'kind': kind1, 'id': id1}, {'kind': kind2, 'name': name2}] + dict_path = [ + {'kind': kind1, 'id': id1}, {'kind': kind2, 'name': name2}] path_pb = self._call_fut(dict_path) element_pb1 = _make_element_pb(type=kind1, id=id1) From 5814ae8959782b2b50413e3340d72280f4afdbd2 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 26 Feb 2018 14:24:04 -0800 Subject: [PATCH 167/611] Install local dependencies when running lint (#4936) --- packages/google-cloud-datastore/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 958830c70e56..1c825175a3bc 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -124,7 +124,7 @@ def lint(session): serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google', 'tests') From 19dbc1d139a5014284690c4a384e70aa1adbd868 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 10:50:49 -0800 Subject: [PATCH 168/611] Make `api_core.page_iterator.PageIterator.item_to_value` public --- packages/google-cloud-datastore/google/cloud/datastore/query.py | 2 +- packages/google-cloud-datastore/setup.py | 2 +- packages/google-cloud-datastore/tests/unit/test_query.py | 2 -- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5d968562f421..6cb0810cd6d2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -515,7 +515,7 @@ def _next_page(self): query=query_pb, ) entity_pbs = self._process_query_results(response_pb) - return page_iterator.Page(self, entity_pbs, self._item_to_value) + return page_iterator.Page(self, entity_pbs, self.item_to_value) def _pb_from_query(query): diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index e6a65339b790..24a48e91cf01 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-cloud-core<0.29dev,>=0.28.0', - 'google-api-core[grpc]<0.2.0dev,>=0.1.1', + 'google-api-core[grpc]<0.2.0dev,>=0.1.5.dev1', ] extras = { } diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 70945895ba99..3a7aa9300a56 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -361,7 +361,6 @@ def test_constructor_defaults(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) - self.assertIsNotNone(iterator._item_to_value) self.assertIsNone(iterator.max_results) self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token,) @@ -384,7 +383,6 @@ def test_constructor_explicit(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) - self.assertIsNotNone(iterator._item_to_value) self.assertEqual(iterator.max_results, limit) self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, start_cursor) From 47a21d1fa73f0582ea4af7d694355465ca4e5c67 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 11:17:14 -0800 Subject: [PATCH 169/611] Update dependency range for api-core to include v1.0.0 releases (#4944) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 24a48e91cf01..3df20defb90a 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-cloud-core<0.29dev,>=0.28.0', - 'google-api-core[grpc]<0.2.0dev,>=0.1.5.dev1', + 'google-api-core[grpc]<2.0.0dev,>=0.1.5.dev1', ] extras = { } From 33ad6a5877a08eca91bf6508644a64655721a721 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 
13:45:05 -0800
Subject: [PATCH 170/611] Update minimum api-core version to 1.0.0 for Datastore, BigQuery, Trace, Logging, and Spanner (#4946)

---
 packages/google-cloud-datastore/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py
index 3df20defb90a..667814e2851c 100644
--- a/packages/google-cloud-datastore/setup.py
+++ b/packages/google-cloud-datastore/setup.py
@@ -30,7 +30,7 @@
 release_status = 'Development Status :: 5 - Production/Stable'
 dependencies = [
     'google-cloud-core<0.29dev,>=0.28.0',
-    'google-api-core[grpc]<2.0.0dev,>=0.1.5.dev1',
+    'google-api-core[grpc]<2.0.0dev,>=1.0.0',
 ]
 extras = {
 }

From 84747b5ac22b3faac1b3cfebe55c735d1c650662 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Wed, 28 Feb 2018 09:03:26 -0800
Subject: [PATCH 171/611] Release datastore 1.6.0 (#4955)

---
 packages/google-cloud-datastore/CHANGELOG.md | 18 ++++++++++++++++++
 packages/google-cloud-datastore/setup.py     |  2 +-
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md
index f75f5f358292..4c47905a4a53 100644
--- a/packages/google-cloud-datastore/CHANGELOG.md
+++ b/packages/google-cloud-datastore/CHANGELOG.md
@@ -4,6 +4,24 @@
 
 [1]: https://pypi.org/project/google-cloud-datastore/#history
 
+## 1.6.0
+
+### Implementation changes
+
+- Don't check 'exclude_from_indexes' for empty lists. (#4915)
+
+### Dependencies
+
+- The minimum version for `google-api-core` has been updated to version 1.0.0. This may cause some incompatibility with older google-cloud libraries; you will need to update those libraries if you have a dependency conflict. (#4944, #4946)
+
+### Testing and internal changes
+
+- Install local dependencies when running lint (#4936)
+- Re-enable lint for tests, remove usage of pylint (#4921)
+- Normalize all setup.py files (#4909)
+- Exercise datastore query result paging (#4905)
+- Pass '*session.posargs' through on command line for system tests.
(#4904) + ## 1.5.0 ### Interface additions diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 667814e2851c..1b3965b79f65 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-datastore' description = 'Google Cloud Datastore API client library' -version = '1.5.0' +version = '1.6.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d49fcaa8e97103fb45f840babeee242089e80093 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 15 Mar 2018 08:52:22 -0700 Subject: [PATCH 172/611] Fix bad trove classifier --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 1b3965b79f65..139aae2e6852 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -26,7 +26,7 @@ # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Stable' +# 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-cloud-core<0.29dev,>=0.28.0', From 5b0db7af7e9826b5584df383cff6c12ece53ef1b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 30 Apr 2018 12:43:11 -0400 Subject: [PATCH 173/611] Add example showing explicit unicode for text values in entities. (#5263) Closes #5262. --- .../google-cloud-datastore/google/cloud/datastore/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 12ff017e6e40..53494f9f8785 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -25,6 +25,7 @@ >>> key >>> entity = datastore.Entity(key) + >>> entity['question'] = u'Life, universe?' # Explicit unicode for text >>> entity['answer'] = 42 >>> entity From eee9ac63a67115e1ddee454588c676c67765e4fd Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 4 May 2018 09:01:24 -0700 Subject: [PATCH 174/611] Add Test runs for Python 3.7 and remove 3.4 (#5295) * remove 3.4 from unit test runs * add 3.7 to most packages. 
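A note on the explicit-unicode example added two patches up: on Python 2 a bare string literal is bytes, and the value helpers persist bytes as a blob value rather than a text value, so doctest examples should spell text out with a `u''` literal. A short sketch of the distinction (behavior per the v1 value mapping):

from google.cloud import datastore

client = datastore.Client()
entity = datastore.Entity(client.key('EntityKind'))
entity['question'] = u'Life, universe?'  # unicode -> stored as text
entity['raw'] = b'\x00\x01'              # bytes   -> stored as a blob
client.put(entity)  # partial key is completed server-side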
PubSub, Monitoring, BigQuery not enabled * Fix #5292 by draining queue in a way compatible with SimpleQueue and Queue --- packages/google-cloud-datastore/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index 1c825175a3bc..f6a123835a5f 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -54,7 +54,7 @@ def default(session): @nox.session -@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) def unit(session, py): """Run the unit test suite.""" From ec32d900cec88fc8fbc1ec542b1692d4dc373ee4 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 16 May 2018 10:14:30 -0700 Subject: [PATCH 175/611] Modify system tests to use prerelease versions of grpcio (#5304) --- .../google-cloud-datastore/google/cloud/datastore/__init__.py | 2 +- packages/google-cloud-datastore/nox.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 53494f9f8785..8ceee43f0f74 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -28,7 +28,7 @@ >>> entity['question'] = u'Life, universe?' # Explicit unicode for text >>> entity['answer'] = 42 >>> entity - + >>> query = client.query(kind='EntityKind') The main concepts with this API are: diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index f6a123835a5f..e59206d485fc 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -82,6 +82,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) From dd81695f3e745e887489d5dd8d45765756df046e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 22 May 2018 16:03:07 -0400 Subject: [PATCH 176/611] Avoid overwriting '__module__' of messages from shared modules. (#5364) Note that we *are* still overwriting it for messages from modules defined within the current package. See #4715. 
--- .../google/cloud/datastore_v1/types.py | 44 ++++++++++++------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py index a07136cd8624..6f9f243b3a84 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py @@ -15,30 +15,42 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.datastore_v1.proto import datastore_pb2 -from google.cloud.datastore_v1.proto import entity_pb2 -from google.cloud.datastore_v1.proto import query_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 from google.protobuf import wrappers_pb2 from google.type import latlng_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, + struct_pb2, + timestamp_pb2, + wrappers_pb2, + latlng_pb2, +] + +_local_modules = [ + datastore_pb2, + entity_pb2, + query_pb2, +] + names = [] -for module in ( - http_pb2, - datastore_pb2, - entity_pb2, - query_pb2, - descriptor_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - latlng_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.datastore_v1.types' setattr(sys.modules[__name__], name, message) From 5aa48f9f72f44f4ecad942debd5e0ff692ed7de1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 29 May 2018 18:13:51 -0400 Subject: [PATCH 177/611] Attempt again to reproduce #4264. (#5403) * Add utility to populate / clear entities w/ timestamp (int) keys. * Clean up the logic for deleting kinds with more than 500 entities. 
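The cleanup change works around the 500-mutation ceiling on a single Datastore commit: a kind holding more than `MAX_DEL_ENTITIES` entities has to be deleted in chunks, as the `delete_chunks` helper in the diff below does. The core idea as a standalone sketch:

MAX_DEL_ENTITIES = 500  # per-commit mutation ceiling


def delete_in_chunks(client, entities):
    keys = [entity.key for entity in entities]
    for start in range(0, len(keys), MAX_DEL_ENTITIES):
        client.delete_multi(keys[start:start + MAX_DEL_ENTITIES])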
--- .../tests/system/test_system.py | 19 +++++- .../tests/system/utils/clear_datastore.py | 58 +++++++++---------- .../tests/system/utils/populate_datastore.py | 52 +++++++++++++++-- 3 files changed, 94 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 4532cd3a9737..cadfbc8403f4 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -363,7 +363,7 @@ def test_projection_query(self): sansa_dict = dict(sansa_entity) self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'}) - def test_query_paginate_simple(self): + def test_query_paginate_simple_uuid_keys(self): # See issue #4264 page_query = self.CLIENT.query(kind='uuid_key') @@ -380,6 +380,23 @@ def test_query_paginate_simple(self): self.assertTrue(page_count > 1) + def test_query_paginate_simple_timestamp_keys(self): + + # See issue #4264 + page_query = self.CLIENT.query(kind='timestamp_key') + iterator = page_query.fetch() + + seen = set() + page_count = 0 + for page in iterator.pages: + page_count += 1 + for entity in page: + timestamp = entity.key.id + self.assertNotIn(timestamp, seen, timestamp) + seen.add(timestamp) + + self.assertTrue(page_count > 1) + def test_query_paginate_with_offset(self): page_query = self._base_query() page_query.order = 'appearances' diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index 3a09bff19898..5820fbf7d2b1 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -17,6 +17,7 @@ from __future__ import print_function import os +import sys import six @@ -31,8 +32,10 @@ 'Person', 'Post', 'uuid_key', + 'timestamp_key', ) TRANSACTION_MAX_GROUPS = 5 +MAX_DEL_ENTITIES = 500 def print_func(message): @@ -40,16 +43,6 @@ def print_func(message): print(message) -def fetch_keys(kind, client, fetch_max=FETCH_MAX, query=None, cursor=None): - if query is None: - query = client.query(kind=kind) - query.keys_only() - - iterator = query.fetch(limit=fetch_max, start_cursor=cursor) - page = six.next(iterator.pages) - return query, list(page), iterator.next_page_token - - def get_ancestors(entities): # NOTE: A key will always have at least one path element. 
key_roots = [entity.key.flat_path[:2] for entity in entities] @@ -57,15 +50,16 @@ def get_ancestors(entities): return list(set(key_roots)) -def remove_kind(kind, client): - results = [] +def delete_chunks(client, results): + while results: + chunk, results = results[:MAX_DEL_ENTITIES], results[MAX_DEL_ENTITIES:] + client.delete_multi([result.key for result in chunk]) - query, curr_results, cursor = fetch_keys(kind, client) - results.extend(curr_results) - while curr_results: - query, curr_results, cursor = fetch_keys( - kind, client, query=query, cursor=cursor) - results.extend(curr_results) + +def remove_kind(kind, client): + query = client.query(kind=kind) + query.keys_only() + results = list(query.fetch()) if not results: return @@ -80,26 +74,32 @@ def remove_kind(kind, client): if len(ancestors) > TRANSACTION_MAX_GROUPS: delete_outside_transaction = True else: - client.delete_multi([result.key for result in results]) + delete_chunks(client, results) if delete_outside_transaction: - client.delete_multi([result.key for result in results]) + delete_chunks(client, results) -def remove_all_entities(client=None): - if client is None: - # Get a client that uses the test dataset. - client = datastore.Client() - for kind in ALL_KINDS: - remove_kind(kind, client) +def main(): + client = datastore.Client() + kinds = sys.argv[1:] + if len(kinds) == 0: + kinds = ALL_KINDS -if __name__ == '__main__': print_func('This command will remove all entities for ' 'the following kinds:') - print_func('\n'.join('- ' + val for val in ALL_KINDS)) + print_func('\n'.join('- ' + val for val in kinds)) response = six.moves.input('Is this OK [y/n]? ') + if response.lower() == 'y': - remove_all_entities() + + for kind in kinds: + remove_kind(kind, client) + else: print_func('Doing nothing.') + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 636b9a4af0cd..9dcadd00dfb1 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -18,6 +18,8 @@ from __future__ import print_function import os +import sys +import time import uuid import six @@ -104,7 +106,7 @@ def add_characters(client=None): character['family'])) -def add_uid_keys(client): +def add_uid_keys(client=None): if client is None: # Get a client that uses the test dataset. client = datastore.Client() @@ -112,19 +114,59 @@ def add_uid_keys(client): num_batches = 2 batch_size = 500 - keys = [] for batch_num in range(num_batches): with client.batch() as batch: for seq_no in range(batch_size): uid = str(uuid.uuid4()) key = client.key('uuid_key', uid) - keys.append(key) entity = datastore.Entity(key=key) entity['batch_num'] = batch_num entity['seq_no'] = seq_no batch.put(entity) +def add_timestamp_keys(client=None): + if client is None: + # Get a client that uses the test dataset. 
+ client = datastore.Client() + + num_batches = 2 + batch_size = 500 + + timestamp_micros = set() + for batch_num in range(num_batches): + with client.batch() as batch: + for seq_no in range(batch_size): + print( + "time_time: batch: {}, sequence: {}".format( + batch_num, seq_no)) + now_micros = int(time.time() * 1e6) + while now_micros in timestamp_micros: + now_micros = int(time.time() * 1e6) + timestamp_micros.add(now_micros) + key = client.key('timestamp_key', now_micros) + entity = datastore.Entity(key=key) + entity['batch_num'] = batch_num + entity['seq_no'] = seq_no + batch.put(entity) + + +def main(): + client = datastore.Client() + flags = sys.argv[1:] + + if len(flags) == 0: + flags = ['--characters', '--uuid', '--timestamps'] + + if '--characters' in flags: + add_characters(client) + + if '--uuid' in flags: + add_uid_keys(client) + + if '--timestamps' in flags: + add_timestamp_keys(client) + + if __name__ == '__main__': - add_characters() - add_uid_keys() + main() From 4872e8fb41bf4c298d2b683db1473e59f754ed57 Mon Sep 17 00:00:00 2001 From: salrashid123 Date: Thu, 7 Jun 2018 10:31:29 -0700 Subject: [PATCH 178/611] minor fix to datastore example (#5452) --- packages/google-cloud-datastore/README.rst | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index a36b4321a180..3e37dfc0f9c6 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -60,17 +60,19 @@ how to activate Cloud Datastore for your project. .. code:: python from google.cloud import datastore - # Create, populate and persist an entity - entity = datastore.Entity(key=datastore.Key('EntityKind')) + # Create, populate and persist an entity with keyID=1234 + client = datastore.Client() + key = client.key('EntityKind', 1234) + entity = datastore.Entity(key=key) entity.update({ 'foo': u'bar', 'baz': 1337, 'qux': False, }) - # Then query for entities - query = datastore.Query(kind='EntityKind') - for result in query.fetch(): - print(result) + client.put(entity) + # Then get by key for this entity + result = client.get(key) + print result .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ From f7aa564384066622dbc1922e1b9fa29bc1119559 Mon Sep 17 00:00:00 2001 From: Chris McDonough Date: Fri, 15 Jun 2018 16:48:49 -0400 Subject: [PATCH 179/611] Do not pass 'offset' once the query iterator has a cursor (#5503) Closes #4675. --- .../google/cloud/datastore/query.py | 10 +++-- .../tests/system/test_system.py | 12 ++++++ .../tests/unit/test_query.py | 40 +++++++++++++++++-- 3 files changed, 54 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 6cb0810cd6d2..5a331da79ccc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -452,10 +452,12 @@ def _build_protobuf(self): if self.max_results is not None: pb.limit.value = self.max_results - self.num_results - if self._offset is not None: - # NOTE: The offset goes down relative to the location - # because we are updating the cursor each time. 
- pb.offset = self._offset - self._skipped_results + if start_cursor is None and self._offset is not None: + # NOTE: We don't need to add an offset to the request protobuf + # if we are using an existing cursor, because the offset + # is only relative to the start of the result set, not + # relative to each page (this method is called per-page) + pb.offset = self._offset return pb diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index cadfbc8403f4..c73b2a80cc17 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -397,6 +397,18 @@ def test_query_paginate_simple_timestamp_keys(self): self.assertTrue(page_count > 1) + def test_query_offset_timestamp_keys(self): + # See issue #4675 + max_all = 10000 + offset = 1 + max_offset = max_all - offset + query = self.CLIENT.query(kind='timestamp_key') + all_w_limit = list(query.fetch(limit=max_all)) + self.assertEqual(len(all_w_limit), max_all) + + offset_w_limit = list(query.fetch(offset=offset, limit=max_offset)) + self.assertEqual(offset_w_limit, all_w_limit[offset:]) + def test_query_paginate_with_offset(self): page_query = self._base_query() page_query.order = 'appearances' diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 3a7aa9300a56..8689bbb1e745 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -404,21 +404,27 @@ def test__build_protobuf_empty(self): expected_pb = query_pb2.Query() self.assertEqual(pb, expected_pb) - def test__build_protobuf_all_values(self): + def test__build_protobuf_all_values_except_offset(self): + # this test and the following (all_values_except_start_and_end_cursor) + # test mutually exclusive states; the offset is ignored + # if a start_cursor is supplied from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query client = _Client(None) query = Query(client) limit = 15 - offset = 9 start_bytes = b'i\xb7\x1d' start_cursor = 'abcd' end_bytes = b'\xc3\x1c\xb3' end_cursor = 'wxyz' iterator = self._make_one( - query, client, limit=limit, offset=offset, - start_cursor=start_cursor, end_cursor=end_cursor) + query, + client, + limit=limit, + start_cursor=start_cursor, + end_cursor=end_cursor + ) self.assertEqual(iterator.max_results, limit) iterator.num_results = 4 iterator._skipped_results = 1 @@ -427,6 +433,32 @@ def test__build_protobuf_all_values(self): expected_pb = query_pb2.Query( start_cursor=start_bytes, end_cursor=end_bytes, + ) + expected_pb.limit.value = limit - iterator.num_results + self.assertEqual(pb, expected_pb) + + def test__build_protobuf_all_values_except_start_and_end_cursor(self): + # this test and the previous (all_values_except_start_offset) + # test mutually exclusive states; the offset is ignored + # if a start_cursor is supplied + from google.cloud.datastore_v1.proto import query_pb2 + from google.cloud.datastore.query import Query + + client = _Client(None) + query = Query(client) + limit = 15 + offset = 9 + iterator = self._make_one( + query, + client, + limit=limit, + offset=offset, + ) + self.assertEqual(iterator.max_results, limit) + iterator.num_results = 4 + + pb = iterator._build_protobuf() + expected_pb = query_pb2.Query( offset=offset - iterator._skipped_results, ) expected_pb.limit.value = limit - 
iterator.num_results From 7442e7e31777b3c7f663388724c201a1c8ba5fba Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Thu, 26 Jul 2018 11:14:56 -0700 Subject: [PATCH 180/611] Release datastore 1.7.0 (#5683) --- packages/google-cloud-datastore/CHANGELOG.md | 19 +++++++++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 4c47905a4a53..2ad75b1ff3b5 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.7.0 + +### Implementation Changes + +- Do not pass 'offset' once the query iterator has a cursor (#5503) +- Add test runs for Python 3.7 and remove run for 3.4 (#5295) + +### Documentation + +- minor fix to datastore example (#5452) +- Add example showing explicit unicode for text values in entities. (#5263) + +### Internal / Testing Changes + +- Modify system tests to use prerelease versions of grpcio (#5304) +- Avoid overwriting '__module__' of messages from shared modules. (#5364) +- Attempt again to reproduce #4264. (#5403) +- Fix bad trove classifier + ## 1.6.0 ### Implementation changes diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 139aae2e6852..d53e4fbdc4ad 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-datastore' description = 'Google Cloud Datastore API client library' -version = '1.6.0' +version = '1.7.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 206d3d2310ba185e970f89c2b960fd9d3912d474 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 29 Aug 2018 13:06:21 -0400 Subject: [PATCH 181/611] Expose 'Client.base_url' property. (#5821) Allows setting to alternate endpoints, e.g. for non-prod or use via something like 'batch-datastore.googleapis.com'. Closes #5801. 
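A minimal sketch of the new property in use, mirroring the unit test below (the
project ID is a placeholder; the endpoint is the batch host named above):

    from google.cloud import datastore

    # Placeholder project; alternate endpoint per the commit message above.
    client = datastore.Client(project='example-project')
    client.base_url = 'https://batch-datastore.googleapis.com'
    assert client.base_url == 'https://batch-datastore.googleapis.com'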
--- .../google/cloud/datastore/client.py | 10 ++++++++++ .../tests/unit/test_client.py | 17 ++++++++++++++--- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index ac93f8480d6f..8daa68c937c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -221,6 +221,16 @@ def _determine_default(project): """Helper: override default project detection.""" return _determine_default_project(project) + @property + def base_url(self): + """Getter for API base URL.""" + return self._base_url + + @base_url.setter + def base_url(self, value): + """Setter for API base URL.""" + self._base_url = value + @property def _datastore_api(self): """Getter for a wrapped API object.""" diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index d3c4ea34d240..03c527b5e2be 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -164,7 +164,7 @@ def test_constructor_w_implicit_inputs(self): self.assertIsNone(client.namespace) self.assertIs(client._credentials, creds) self.assertIsNone(client._http_internal) - self.assertEqual(client._base_url, _DATASTORE_BASE_URL) + self.assertEqual(client.base_url, _DATASTORE_BASE_URL) self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) @@ -189,7 +189,7 @@ def test_constructor_w_explicit_inputs(self): self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) self.assertEqual(list(client._batch_stack), []) - self.assertEqual(client._base_url, _DATASTORE_BASE_URL) + self.assertEqual(client.base_url, _DATASTORE_BASE_URL) def test_constructor_use_grpc_default(self): import google.cloud.datastore.client as MUT @@ -230,7 +230,7 @@ def test_constructor_gcd_host(self): with mock.patch('os.environ', new=fake_environ): client = self._make_one( project=project, credentials=creds, _http=http) - self.assertEqual(client._base_url, 'http://' + host) + self.assertEqual(client.base_url, 'http://' + host) def test__datastore_api_property_gapic(self): client = self._make_one( @@ -253,6 +253,17 @@ def test__datastore_api_property_gapic(self): client._datastore_api, mock.sentinel.ds_api) self.assertEqual(make_api.call_count, 1) + def test_base_url_property(self): + alternate_url = 'https://alias.example.com/' + project = 'PROJECT' + creds = _make_credentials() + http = object() + + client = self._make_one( + project=project, credentials=creds, _http=http) + client.base_url = alternate_url + self.assertEqual(client.base_url, alternate_url) + def test__datastore_api_property_http(self): from google.cloud.datastore._http import HTTPDatastoreAPI From 3e16fba8b397d2e6ed5c23c1b064de568318b910 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 30 Aug 2018 11:50:23 -0400 Subject: [PATCH 182/611] Nox: use inplace installs (#5865) --- packages/google-cloud-datastore/nox.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/nox.py index e59206d485fc..79d04e1f16ba 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/nox.py @@ -34,8 +34,10 @@ def default(session): Python corresponding to the ``nox`` binary the 
``PATH`` can run the tests. """ - # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) + # Install all test dependencies, then install local packages in-place. + session.install('mock', 'pytest', 'pytest-cov') + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) session.install('-e', '.') # Run py.test against the unit tests. @@ -85,11 +87,12 @@ def system(session, py): # Use pre-release gRPC for system tests. session.install('--pre', 'grpcio') - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install('mock', 'pytest', *LOCAL_DEPS) - session.install('../test_utils/') - session.install('.') + # Install all test dependencies, then install local packages in-place. + session.install('mock', 'pytest') + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) + session.install('-e', '../test_utils/') + session.install('-e', '.') # Run py.test against the system tests. session.run('py.test', '--quiet', 'tests/system', *session.posargs) @@ -111,9 +114,11 @@ def doctests(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install('mock', 'pytest', 'sphinx', *LOCAL_DEPS) - session.install('../test_utils/') - session.install('.') + session.install('mock', 'pytest', 'sphinx') + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) + session.install('-e', '../test_utils/') + session.install('-e', '.') # Run py.test against the system tests. session.run('py.test', '--quiet', 'tests/doctests.py') From 81dd34b45ef704be39dd9752b8756d9045e9e046 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 10 Sep 2018 15:55:01 -0400 Subject: [PATCH 183/611] Docs: Replace links to '/stable/' with '/latest/'. (#5901) * Replace links to '/stable/' with '/latest/'. * DRY out duplicated 'README.rst' vs. 'docs/index.rst'. * Include websecurityscanner in docs. Toward #5894. --- packages/google-cloud-datastore/docs/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst index 08466ac45a67..c23023166c11 100644 --- a/packages/google-cloud-datastore/docs/index.rst +++ b/packages/google-cloud-datastore/docs/index.rst @@ -9,7 +9,7 @@ scalable storage for your application. .. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst .. _Google Cloud Datastore API: https://cloud.google.com/datastore -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/usage.html +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/usage.html .. _Product Documentation: https://cloud.google.com/datastore Quick Start @@ -25,7 +25,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore -.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html +.. 
_Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html Installation ~~~~~~~~~~~~ @@ -81,4 +81,4 @@ Api Reference :maxdepth: 2 gapic/v1/api - gapic/v1/types \ No newline at end of file + gapic/v1/types From b1dfdd11fd9529df5bee6121ed7e41979174fa2a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 11 Sep 2018 13:42:40 -0400 Subject: [PATCH 184/611] Prep datastore docs for repo split. (#5919) - Move docs from 'docs/datastore' into 'datastore/docs' and leave symlink. - Harmonize / DRY 'datastore/README.rst' and 'datastore/docs/index.rst'. - Remove docs for GAPIC-generated bits (they aren't part of the surface). - Ensure that docs still build from top-level. Toward #5912. --- packages/google-cloud-datastore/CHANGELOG.md | 4 +- packages/google-cloud-datastore/README.rst | 100 +++++++++++------- .../google-cloud-datastore/docs/batches.rst | 6 ++ .../google-cloud-datastore/docs/changelog.md | 1 + .../google-cloud-datastore/docs/client.rst | 6 ++ packages/google-cloud-datastore/docs/conf.py | 9 +- .../google-cloud-datastore/docs/entities.rst | 6 ++ .../docs/gapic/v1/api.rst | 6 -- .../docs/gapic/v1/types.rst | 5 - .../google-cloud-datastore/docs/helpers.rst | 6 ++ .../google-cloud-datastore/docs/index.rst | 92 +++------------- packages/google-cloud-datastore/docs/keys.rst | 6 ++ .../google-cloud-datastore/docs/queries.rst | 6 ++ .../docs/transactions.rst | 7 ++ 14 files changed, 131 insertions(+), 129 deletions(-) create mode 100644 packages/google-cloud-datastore/docs/batches.rst create mode 120000 packages/google-cloud-datastore/docs/changelog.md create mode 100644 packages/google-cloud-datastore/docs/client.rst create mode 100644 packages/google-cloud-datastore/docs/entities.rst delete mode 100644 packages/google-cloud-datastore/docs/gapic/v1/api.rst delete mode 100644 packages/google-cloud-datastore/docs/gapic/v1/types.rst create mode 100644 packages/google-cloud-datastore/docs/helpers.rst create mode 100644 packages/google-cloud-datastore/docs/keys.rst create mode 100644 packages/google-cloud-datastore/docs/queries.rst create mode 100644 packages/google-cloud-datastore/docs/transactions.rst diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 2ad75b1ff3b5..1c7a1494ae44 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -27,7 +27,7 @@ ### Implementation changes -- Don't check 'exclude_from_indexes' for empty lists. (#4915) +- Don't check `exclude_from_indexes` for empty lists. (#4915) ### Dependencies @@ -39,7 +39,7 @@ - Re-enable lint for tests, remove usage of pylint (#4921) - Normalize all setup.py files (#4909) - Exercise datastore query result paging (#4905) -- Pass '*session.posargs' through on command line for system tests. (#4904) +- Pass `*session.posargs` through on command line for system tests. (#4904) ## 1.5.0 diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 3e37dfc0f9c6..869bd2d3e176 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -1,61 +1,78 @@ Python Client for Google Cloud Datastore ======================================== - Python idiomatic client for `Google Cloud Datastore`_ - -.. _Google Cloud Datastore: https://cloud.google.com/datastore/docs - |pypi| |versions| -- `Documentation`_ +`Google Cloud Datastore API`_ is a fully managed, schemaless database for +storing non-relational data. 
Cloud Datastore automatically scales with your +users and supports ACID transactions, high availability of reads and writes, +strong consistency for reads and ancestor queries, and eventual consistency for +all other queries. -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/client.html +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg + :target: https://pypi.org/project/google-cloud-datastore/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg + :target: https://pypi.org/project/google-cloud-datastore/ +.. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs +.. _Product Documentation: https://cloud.google.com/datastore/docs +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/index.html Quick Start ----------- -.. code-block:: console +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud Datastore API.`_ +4. `Setup Authentication.`_ - $ pip install --upgrade google-cloud-datastore +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html -For more information on setting up your Python development environment, -such as installing ``pip`` and ``virtualenv`` on your system, please refer -to `Python Development Environment Setup Guide`_ for Google Cloud Platform. +Installation +~~~~~~~~~~~~ -.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. -Authentication --------------- +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. -With ``google-cloud-python`` we try to make authentication as painless as -possible. Check out the `Authentication section`_ in our documentation to -learn more. You may also find the `authentication document`_ shared by all -the ``google-cloud-*`` libraries to be helpful. +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ -.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html -.. _authentication document: https://github.com/GoogleCloudPlatform/google-cloud-common/tree/master/authentication -Using the API -------------- +Mac/Linux +^^^^^^^^^ -Google `Cloud Datastore`_ (`Datastore API docs`_) is a fully managed, -schemaless database for storing non-relational data. Cloud Datastore -automatically scales with your users and supports ACID transactions, high -availability of reads and writes, strong consistency for reads and ancestor -queries, and eventual consistency for all other queries. +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-datastore -.. 
_Cloud Datastore: https://cloud.google.com/datastore/docs -.. _Datastore API docs: https://cloud.google.com/datastore/docs/ -See the ``google-cloud-python`` API `datastore documentation`_ to learn how to -interact with the Cloud Datastore using this Client Library. +Windows +^^^^^^^ -.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/client.html +.. code-block:: console -See the `official Google Cloud Datastore documentation`_ for more details on -how to activate Cloud Datastore for your project. + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-datastore -.. _official Google Cloud Datastore documentation: https://cloud.google.com/datastore/docs/activate + +Example Usage +~~~~~~~~~~~~~ .. code:: python @@ -72,9 +89,12 @@ how to activate Cloud Datastore for your project. client.put(entity) # Then get by key for this entity result = client.get(key) - print result + print(result) -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg - :target: https://pypi.org/project/google-cloud-datastore/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg - :target: https://pypi.org/project/google-cloud-datastore/ +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Cloud Datastore API + API to see other available methods on the client. +- Read the `Product documentation`_ to learn + more about the product and see How-to Guides. diff --git a/packages/google-cloud-datastore/docs/batches.rst b/packages/google-cloud-datastore/docs/batches.rst new file mode 100644 index 000000000000..4a2b0eb7d761 --- /dev/null +++ b/packages/google-cloud-datastore/docs/batches.rst @@ -0,0 +1,6 @@ +Batches +~~~~~~~ + +.. automodule:: google.cloud.datastore.batch + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/changelog.md b/packages/google-cloud-datastore/docs/changelog.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-datastore/docs/changelog.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/client.rst b/packages/google-cloud-datastore/docs/client.rst new file mode 100644 index 000000000000..060477ec12fd --- /dev/null +++ b/packages/google-cloud-datastore/docs/client.rst @@ -0,0 +1,6 @@ +Datastore Client +================ + +.. automodule:: google.cloud.datastore.client + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index de92e20469fd..fd43beccaf91 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -33,6 +33,7 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', + 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.napoleon', @@ -47,6 +48,12 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = { + '.md': 'recommonmark.parser.CommonMarkParser', +} + # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] @@ -120,7 +127,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
-html_theme = 'sphinx_rtd_theme' +html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the diff --git a/packages/google-cloud-datastore/docs/entities.rst b/packages/google-cloud-datastore/docs/entities.rst new file mode 100644 index 000000000000..dc8217ddf3ba --- /dev/null +++ b/packages/google-cloud-datastore/docs/entities.rst @@ -0,0 +1,6 @@ +Entities +~~~~~~~~ + +.. automodule:: google.cloud.datastore.entity + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/gapic/v1/api.rst b/packages/google-cloud-datastore/docs/gapic/v1/api.rst deleted file mode 100644 index 9d987a96747a..000000000000 --- a/packages/google-cloud-datastore/docs/gapic/v1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Google Cloud Datastore API -===================================== - -.. automodule:: google.cloud.datastore_v1 - :members: - :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/gapic/v1/types.rst b/packages/google-cloud-datastore/docs/gapic/v1/types.rst deleted file mode 100644 index 840085452eb4..000000000000 --- a/packages/google-cloud-datastore/docs/gapic/v1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Google Cloud Datastore API Client -=========================================== - -.. automodule:: google.cloud.datastore_v1.types - :members: \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/helpers.rst b/packages/google-cloud-datastore/docs/helpers.rst new file mode 100644 index 000000000000..99f9b9bf28bc --- /dev/null +++ b/packages/google-cloud-datastore/docs/helpers.rst @@ -0,0 +1,6 @@ +Helpers +~~~~~~~ + +.. automodule:: google.cloud.datastore.helpers + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst index c23023166c11..c90607799913 100644 --- a/packages/google-cloud-datastore/docs/index.rst +++ b/packages/google-cloud-datastore/docs/index.rst @@ -1,84 +1,26 @@ -Python Client for Google Cloud Datastore API (`Beta`_) -====================================================== +.. include:: /../datastore/README.rst -`Google Cloud Datastore API`_: Accesses the schemaless NoSQL database to provide fully managed, robust, -scalable storage for your application. -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst -.. _Google Cloud Datastore API: https://cloud.google.com/datastore -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/usage.html -.. _Product Documentation: https://cloud.google.com/datastore - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Google Cloud Datastore API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore -.. 
_Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-datastore - - -Windows -^^^^^^^ - -.. code-block:: console +API Reference +------------- +.. toctree:: + :maxdepth: 2 - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-datastore + client + entities + keys + queries + transactions + batches + helpers -Next Steps -~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Google Cloud Datastore API - API to see other available methods on the client. -- Read the `Google Cloud Datastore API Product documentation`_ to learn - more about the product and see How-to Guides. -- View this `repository’s main README`_ to see the full list of Cloud - APIs that we cover. +Changelog +--------- -.. _Google Cloud Datastore API Product documentation: https://cloud.google.com/datastore -.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst +For a list of all ``google-cloud-datastore`` releases: -Api Reference -------------- .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - gapic/v1/api - gapic/v1/types + changelog diff --git a/packages/google-cloud-datastore/docs/keys.rst b/packages/google-cloud-datastore/docs/keys.rst new file mode 100644 index 000000000000..79f7a7eb4e38 --- /dev/null +++ b/packages/google-cloud-datastore/docs/keys.rst @@ -0,0 +1,6 @@ +Keys +~~~~ + +.. automodule:: google.cloud.datastore.key + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/queries.rst b/packages/google-cloud-datastore/docs/queries.rst new file mode 100644 index 000000000000..aa9e7dfb7193 --- /dev/null +++ b/packages/google-cloud-datastore/docs/queries.rst @@ -0,0 +1,6 @@ +Queries +~~~~~~~ + +.. automodule:: google.cloud.datastore.query + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/transactions.rst b/packages/google-cloud-datastore/docs/transactions.rst new file mode 100644 index 000000000000..1737a7bf5994 --- /dev/null +++ b/packages/google-cloud-datastore/docs/transactions.rst @@ -0,0 +1,7 @@ +Transactions +~~~~~~~~~~~~ + +.. automodule:: google.cloud.datastore.transaction + :members: + :show-inheritance: + :inherited-members: From 6b3a742f98e1765bd1bdb3878dc4f3b2e4225a6f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 17 Sep 2018 12:56:03 -0400 Subject: [PATCH 185/611] Redirect renamed 'usage.html'/'client.html' -> 'index.html'. (#5996) Closes #5995. 
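For orientation, a minimal redirect stub of the kind this patch adds (assumed
markup; the verbatim eight-line file is not reproduced here):

    <html>
      <head>
        <meta http-equiv="refresh" content="0; url=index.html" />
      </head>
    </html>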
--- packages/google-cloud-datastore/docs/usage.html | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 packages/google-cloud-datastore/docs/usage.html diff --git a/packages/google-cloud-datastore/docs/usage.html b/packages/google-cloud-datastore/docs/usage.html new file mode 100644 index 000000000000..9b81d6976cda --- /dev/null +++ b/packages/google-cloud-datastore/docs/usage.html @@ -0,0 +1,8 @@ + + + + + + From 1380b0643cb4793b999f1d77308a510968cb7bfc Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 25 Sep 2018 14:02:26 -0400 Subject: [PATCH 186/611] Datastore: add 'synth.py'. (#6078) Closes #6068. --- .../datastore_v1/gapic/datastore_client.py | 286 ++++++---- .../google/cloud/datastore_v1/gapic/enums.py | 20 +- .../datastore_v1/gapic/transports/__init__.py | 0 .../transports/datastore_grpc_transport.py | 189 +++++++ .../cloud/datastore_v1/proto/datastore_pb2.py | 510 +++++------------- .../datastore_v1/proto/datastore_pb2_grpc.py | 2 +- .../cloud/datastore_v1/proto/entity_pb2.py | 62 +-- .../cloud/datastore_v1/proto/query_pb2.py | 90 ++-- packages/google-cloud-datastore/synth.py | 32 ++ 9 files changed, 605 insertions(+), 586 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py create mode 100644 packages/google-cloud-datastore/synth.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index e747909477cc..404fc0c8a8b8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +16,21 @@ """Accesses the google.datastore.v1 Datastore API.""" import pkg_resources +import warnings +from google.oauth2 import service_account import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.grpc_helpers import google.api_core.protobuf_helpers +import grpc from google.cloud.datastore_v1.gapic import datastore_client_config from google.cloud.datastore_v1.gapic import enums +from google.cloud.datastore_v1.gapic.transports import datastore_grpc_transport from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2_grpc from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore_v1.proto import query_pb2 @@ -44,18 +51,33 @@ class DatastoreClient(object): SERVICE_ADDRESS = 'datastore.googleapis.com:443' """The default address of the service.""" - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', - ) - - # The name of the interface for this client. This is the key used to find - # method configuration in the client_config dictionary. + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. 
_INTERFACE_NAME = 'google.datastore.v1.Datastore' + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + def __init__(self, + transport=None, channel=None, credentials=None, client_config=datastore_client_config.config, @@ -63,98 +85,83 @@ def __init__(self, """Constructor. Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive + transport (Union[~.DatastoreGrpcTransport, + Callable[[~.Credentials, type], ~.DatastoreGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): A dictionary of call options for each - method. If not specified, the default configuration is used. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - 'The `channel` and `credentials` arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__), ) - - # Create the channel. - if channel is None: - channel = google.api_core.grpc_helpers.create_channel( - self.SERVICE_ADDRESS, + # Raise deprecation warnings for things we want to go away. + if client_config: + warnings.warn('The `client_config` argument is deprecated.', + PendingDeprecationWarning) + if channel: + warnings.warn( + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning) + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. 
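        # (Reader's note, not part of the patch: three cases follow; a callable
        # `transport` is invoked with the credentials and the default transport
        # class, a ready-made instance is used as-is but must not be combined
        # with `credentials`, and otherwise the default DatastoreGrpcTransport
        # is built from `channel`/`credentials`.)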
+ if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=datastore_grpc_transport. + DatastoreGrpcTransport, + ) + else: + if credentials: + raise ValueError( + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') + self.transport = transport + else: + self.transport = datastore_grpc_transport.DatastoreGrpcTransport( + address=self.SERVICE_ADDRESS, + channel=channel, credentials=credentials, - scopes=self._DEFAULT_SCOPES, ) - # Create the gRPC stubs. - self.datastore_stub = (datastore_pb2.DatastoreStub(channel)) - if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC # from the client configuration. # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) - method_configs = google.api_core.gapic_v1.config.parse_method_configs( + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config['interfaces'][self._INTERFACE_NAME], ) - # Write the "inner API call" methods to the class. - # These are wrapped versions of the gRPC stub methods, with retry and - # timeout configuration applied, called by the public methods on - # this class. - self._lookup = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.Lookup, - default_retry=method_configs['Lookup'].retry, - default_timeout=method_configs['Lookup'].timeout, - client_info=client_info, - ) - self._run_query = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.RunQuery, - default_retry=method_configs['RunQuery'].retry, - default_timeout=method_configs['RunQuery'].timeout, - client_info=client_info, - ) - self._begin_transaction = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.BeginTransaction, - default_retry=method_configs['BeginTransaction'].retry, - default_timeout=method_configs['BeginTransaction'].timeout, - client_info=client_info, - ) - self._commit = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.Commit, - default_retry=method_configs['Commit'].retry, - default_timeout=method_configs['Commit'].timeout, - client_info=client_info, - ) - self._rollback = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.Rollback, - default_retry=method_configs['Rollback'].retry, - default_timeout=method_configs['Rollback'].timeout, - client_info=client_info, - ) - self._allocate_ids = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.AllocateIds, - default_retry=method_configs['AllocateIds'].retry, - default_timeout=method_configs['AllocateIds'].timeout, - client_info=client_info, - ) - self._reserve_ids = google.api_core.gapic_v1.method.wrap_method( - self.datastore_stub.ReserveIds, - default_retry=method_configs['ReserveIds'].retry, - default_timeout=method_configs['ReserveIds'].timeout, - client_info=client_info, - ) + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. 
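        # (Reader's note, not part of the patch: each public method below then
        # wraps its transport stub lazily, on first call, roughly:
        #     if 'lookup' not in self._inner_api_calls:
        #         self._inner_api_calls['lookup'] = wrap_method(
        #             self.transport.lookup, ...)
        # so per-method retry/timeout configuration is applied once, not
        # eagerly for every RPC at construction time.)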
+ self._inner_api_calls = {} # Service calls def lookup(self, @@ -172,7 +179,10 @@ def lookup(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' + >>> + >>> # TODO: Initialize ``keys``: >>> keys = [] >>> >>> response = client.lookup(project_id, keys) @@ -204,15 +214,22 @@ def lookup(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'lookup' not in self._inner_api_calls: + self._inner_api_calls[ + 'lookup'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.lookup, + default_retry=self._method_configs['Lookup'].retry, + default_timeout=self._method_configs['Lookup'].timeout, + client_info=self._client_info, + ) + request = datastore_pb2.LookupRequest( project_id=project_id, keys=keys, read_options=read_options, ) - return self._lookup( + return self._inner_api_calls['lookup']( request, retry=retry, timeout=timeout, metadata=metadata) def run_query(self, @@ -232,7 +249,10 @@ def run_query(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' + >>> + >>> # TODO: Initialize ``partition_id``: >>> partition_id = {} >>> >>> response = client.run_query(project_id, partition_id) @@ -273,9 +293,16 @@ def run_query(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'run_query' not in self._inner_api_calls: + self._inner_api_calls[ + 'run_query'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_query, + default_retry=self._method_configs['RunQuery'].retry, + default_timeout=self._method_configs['RunQuery'].timeout, + client_info=self._client_info, + ) + # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( @@ -290,7 +317,7 @@ def run_query(self, query=query, gql_query=gql_query, ) - return self._run_query( + return self._inner_api_calls['run_query']( request, retry=retry, timeout=timeout, metadata=metadata) def begin_transaction(self, @@ -307,6 +334,7 @@ def begin_transaction(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' >>> >>> response = client.begin_transaction(project_id) @@ -335,14 +363,23 @@ def begin_transaction(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'begin_transaction' not in self._inner_api_calls: + self._inner_api_calls[ + 'begin_transaction'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.begin_transaction, + default_retry=self._method_configs['BeginTransaction']. + retry, + default_timeout=self._method_configs['BeginTransaction']. 
+ timeout, + client_info=self._client_info, + ) + request = datastore_pb2.BeginTransactionRequest( project_id=project_id, transaction_options=transaction_options, ) - return self._begin_transaction( + return self._inner_api_calls['begin_transaction']( request, retry=retry, timeout=timeout, metadata=metadata) def commit(self, @@ -363,8 +400,13 @@ def commit(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' + >>> + >>> # TODO: Initialize ``mode``: >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED + >>> + >>> # TODO: Initialize ``mutations``: >>> mutations = [] >>> >>> response = client.commit(project_id, mode, mutations) @@ -409,9 +451,16 @@ def commit(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'commit' not in self._inner_api_calls: + self._inner_api_calls[ + 'commit'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.commit, + default_retry=self._method_configs['Commit'].retry, + default_timeout=self._method_configs['Commit'].timeout, + client_info=self._client_info, + ) + # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) @@ -422,7 +471,7 @@ def commit(self, mutations=mutations, transaction=transaction, ) - return self._commit( + return self._inner_api_calls['commit']( request, retry=retry, timeout=timeout, metadata=metadata) def rollback(self, @@ -439,7 +488,10 @@ def rollback(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' + >>> + >>> # TODO: Initialize ``transaction``: >>> transaction = b'' >>> >>> response = client.rollback(project_id, transaction) @@ -467,14 +519,21 @@ def rollback(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'rollback' not in self._inner_api_calls: + self._inner_api_calls[ + 'rollback'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.rollback, + default_retry=self._method_configs['Rollback'].retry, + default_timeout=self._method_configs['Rollback'].timeout, + client_info=self._client_info, + ) + request = datastore_pb2.RollbackRequest( project_id=project_id, transaction=transaction, ) - return self._rollback( + return self._inner_api_calls['rollback']( request, retry=retry, timeout=timeout, metadata=metadata) def allocate_ids(self, @@ -492,7 +551,10 @@ def allocate_ids(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' + >>> + >>> # TODO: Initialize ``keys``: >>> keys = [] >>> >>> response = client.allocate_ids(project_id, keys) @@ -522,14 +584,22 @@ def allocate_ids(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. 
+ if 'allocate_ids' not in self._inner_api_calls: + self._inner_api_calls[ + 'allocate_ids'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.allocate_ids, + default_retry=self._method_configs['AllocateIds'].retry, + default_timeout=self._method_configs['AllocateIds']. + timeout, + client_info=self._client_info, + ) + request = datastore_pb2.AllocateIdsRequest( project_id=project_id, keys=keys, ) - return self._allocate_ids( + return self._inner_api_calls['allocate_ids']( request, retry=retry, timeout=timeout, metadata=metadata) def reserve_ids(self, @@ -548,7 +618,10 @@ def reserve_ids(self, >>> >>> client = datastore_v1.DatastoreClient() >>> + >>> # TODO: Initialize ``project_id``: >>> project_id = '' + >>> + >>> # TODO: Initialize ``keys``: >>> keys = [] >>> >>> response = client.reserve_ids(project_id, keys) @@ -579,13 +652,20 @@ def reserve_ids(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'reserve_ids' not in self._inner_api_calls: + self._inner_api_calls[ + 'reserve_ids'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.reserve_ids, + default_retry=self._method_configs['ReserveIds'].retry, + default_timeout=self._method_configs['ReserveIds'].timeout, + client_info=self._client_info, + ) + request = datastore_pb2.ReserveIdsRequest( project_id=project_id, keys=keys, database_id=database_id, ) - return self._reserve_ids( + return self._inner_api_calls['reserve_ids']( request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 893c098c92dc..299a20ff87af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,8 +15,10 @@ # limitations under the License. """Wrappers for protocol buffer enum types.""" +import enum + -class NullValue(object): +class NullValue(enum.IntEnum): """ ``NullValue`` is a singleton enumeration to represent the null value for the ``Value`` type union. @@ -28,7 +32,7 @@ class NullValue(object): class EntityResult(object): - class ResultType(object): + class ResultType(enum.IntEnum): """ Specifies what data the 'entity' field contains. A ``ResultType`` is either implied (for example, in ``LookupResponse.missing`` @@ -49,7 +53,7 @@ class ResultType(object): class PropertyOrder(object): - class Direction(object): + class Direction(enum.IntEnum): """ The sort direction. @@ -64,7 +68,7 @@ class Direction(object): class CompositeFilter(object): - class Operator(object): + class Operator(enum.IntEnum): """ A composite filter operator. @@ -77,7 +81,7 @@ class Operator(object): class PropertyFilter(object): - class Operator(object): + class Operator(enum.IntEnum): """ A property filter operator. @@ -100,7 +104,7 @@ class Operator(object): class QueryResultBatch(object): - class MoreResultsType(object): + class MoreResultsType(enum.IntEnum): """ The possible values for the ``more_results`` field. 
@@ -120,7 +124,7 @@ class MoreResultsType(object): class CommitRequest(object): - class Mode(object): + class Mode(enum.IntEnum): """ The modes available for commits. @@ -136,7 +140,7 @@ class Mode(object): class ReadOptions(object): - class ReadConsistency(object): + class ReadConsistency(enum.IntEnum): """ The possible values for read consistencies. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py new file mode 100644 index 000000000000..af1db245878b --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.grpc_helpers + +from google.cloud.datastore_v1.proto import datastore_pb2_grpc + + +class DatastoreGrpcTransport(object): + """gRPC transport class providing stubs for + google.datastore.v1 Datastore API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', + ) + + def __init__(self, + channel=None, + credentials=None, + address='datastore.googleapis.com:443'): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments are mutually ' + 'exclusive.', ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + ) + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. 
+ self._stubs = { + 'datastore_stub': datastore_pb2_grpc.DatastoreStub(channel), + } + + @classmethod + def create_channel(cls, + address='datastore.googleapis.com:443', + credentials=None): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, + credentials=credentials, + scopes=cls._OAUTH_SCOPES, + ) + + @property + def lookup(self): + """Return the gRPC stub for {$apiMethod.name}. + + Looks up entities by key. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['datastore_stub'].Lookup + + @property + def run_query(self): + """Return the gRPC stub for {$apiMethod.name}. + + Queries for entities. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['datastore_stub'].RunQuery + + @property + def begin_transaction(self): + """Return the gRPC stub for {$apiMethod.name}. + + Begins a new transaction. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['datastore_stub'].BeginTransaction + + @property + def commit(self): + """Return the gRPC stub for {$apiMethod.name}. + + Commits a transaction, optionally creating, deleting or modifying some + entities. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['datastore_stub'].Commit + + @property + def rollback(self): + """Return the gRPC stub for {$apiMethod.name}. + + Rolls back a transaction. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['datastore_stub'].Rollback + + @property + def allocate_ids(self): + """Return the gRPC stub for {$apiMethod.name}. + + Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['datastore_stub'].AllocateIds + + @property + def reserve_ids(self): + """Return the gRPC stub for {$apiMethod.name}. + + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs['datastore_stub'].ReserveIds diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index 316bb6cce1eb..2cfcf62435f6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -25,7 +25,6 @@ serialized_pb=_b('\n/google/cloud/datastore_v1/proto/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/datastore_v1/proto/entity.proto\x1a+google/cloud/datastore_v1/proto/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"s\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x44\n\x13transaction_options\x18\n \x01(\x0b\x32\'.google.datastore.v1.TransactionOptions\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"d\n\x11ReserveIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0b\x64\x61tabase_id\x18\t \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x14\n\x12ReserveIdsResponse\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 
\x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type\"\xe3\x01\n\x12TransactionOptions\x12G\n\nread_write\x18\x01 \x01(\x0b\x32\x31.google.datastore.v1.TransactionOptions.ReadWriteH\x00\x12\x45\n\tread_only\x18\x02 \x01(\x0b\x32\x30.google.datastore.v1.TransactionOptions.ReadOnlyH\x00\x1a)\n\tReadWrite\x12\x1c\n\x14previous_transaction\x18\x01 \x01(\x0c\x1a\n\n\x08ReadOnlyB\x06\n\x04mode2\xec\x07\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*\x12\x8e\x01\n\nReserveIds\x12&.google.datastore.v1.ReserveIdsRequest\x1a\'.google.datastore.v1.ReserveIdsResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/projects/{project_id}:reserveIds:\x01*B\xa1\x01\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01Z=0.15.0.""" - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - def Lookup(self, request, context): - """Looks up entities by key. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def RunQuery(self, request, context): - """Queries for entities. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def BeginTransaction(self, request, context): - """Begins a new transaction. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Commit(self, request, context): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Rollback(self, request, context): - """Rolls back a transaction. 
- """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def AllocateIds(self, request, context): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ReserveIds(self, request, context): - """Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaDatastoreStub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - def Lookup(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Looks up entities by key. - """ - raise NotImplementedError() - Lookup.future = None - def RunQuery(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Queries for entities. - """ - raise NotImplementedError() - RunQuery.future = None - def BeginTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Begins a new transaction. - """ - raise NotImplementedError() - BeginTransaction.future = None - def Commit(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - raise NotImplementedError() - Commit.future = None - def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Rolls back a transaction. - """ - raise NotImplementedError() - Rollback.future = None - def AllocateIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ - raise NotImplementedError() - AllocateIds.future = None - def ReserveIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - """ - raise NotImplementedError() - ReserveIds.future = None - - - def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.FromString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.FromString, - ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.FromString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.FromString, - ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsRequest.FromString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.FromString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.FromString, - } - response_serializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.SerializeToString, - } - method_implementations = { - ('google.datastore.v1.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), - ('google.datastore.v1.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), - ('google.datastore.v1.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), - ('google.datastore.v1.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), - ('google.datastore.v1.Datastore', 'ReserveIds'): face_utilities.unary_unary_inline(servicer.ReserveIds), - ('google.datastore.v1.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), - ('google.datastore.v1.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.SerializeToString, - } - response_deserializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.FromString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.FromString, - ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.FromString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.FromString, - ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsResponse.FromString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.FromString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.FromString, - } - cardinalities = { - 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, - 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, - 'Commit': cardinality.Cardinality.UNARY_UNARY, - 'Lookup': cardinality.Cardinality.UNARY_UNARY, - 'ReserveIds': cardinality.Cardinality.UNARY_UNARY, - 'Rollback': cardinality.Cardinality.UNARY_UNARY, - 'RunQuery': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.datastore.v1.Datastore', cardinalities, options=stub_options) -except ImportError: - pass + +_DATASTORE = _descriptor.ServiceDescriptor( + name='Datastore', + full_name='google.datastore.v1.Datastore', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=2567, + serialized_end=3571, + methods=[ + _descriptor.MethodDescriptor( + name='Lookup', + full_name='google.datastore.v1.Datastore.Lookup', + index=0, + containing_service=None, + input_type=_LOOKUPREQUEST, + output_type=_LOOKUPRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002%\" /v1/projects/{project_id}:lookup:\001*')), + ), + _descriptor.MethodDescriptor( + name='RunQuery', + full_name='google.datastore.v1.Datastore.RunQuery', + index=1, + containing_service=None, + input_type=_RUNQUERYREQUEST, + output_type=_RUNQUERYRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\'\"\"/v1/projects/{project_id}:runQuery:\001*')), + ), + _descriptor.MethodDescriptor( + name='BeginTransaction', + full_name='google.datastore.v1.Datastore.BeginTransaction', + index=2, + containing_service=None, + input_type=_BEGINTRANSACTIONREQUEST, + output_type=_BEGINTRANSACTIONRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/\"*/v1/projects/{project_id}:beginTransaction:\001*')), + ), + _descriptor.MethodDescriptor( + name='Commit', + full_name='google.datastore.v1.Datastore.Commit', + index=3, + containing_service=None, 
+ input_type=_COMMITREQUEST, + output_type=_COMMITRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002%\" /v1/projects/{project_id}:commit:\001*')), + ), + _descriptor.MethodDescriptor( + name='Rollback', + full_name='google.datastore.v1.Datastore.Rollback', + index=4, + containing_service=None, + input_type=_ROLLBACKREQUEST, + output_type=_ROLLBACKRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\'\"\"/v1/projects/{project_id}:rollback:\001*')), + ), + _descriptor.MethodDescriptor( + name='AllocateIds', + full_name='google.datastore.v1.Datastore.AllocateIds', + index=5, + containing_service=None, + input_type=_ALLOCATEIDSREQUEST, + output_type=_ALLOCATEIDSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002*\"%/v1/projects/{project_id}:allocateIds:\001*')), + ), + _descriptor.MethodDescriptor( + name='ReserveIds', + full_name='google.datastore.v1.Datastore.ReserveIds', + index=6, + containing_service=None, + input_type=_RESERVEIDSREQUEST, + output_type=_RESERVEIDSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002)\"$/v1/projects/{project_id}:reserveIds:\001*')), + ), +]) +_sym_db.RegisterServiceDescriptor(_DATASTORE) + +DESCRIPTOR.services_by_name['Datastore'] = _DATASTORE + # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py index 32d35c2c0a9a..5209ca6e146d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py @@ -1,7 +1,7 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
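# (Editor's sketch, not part of the generated module: a raw unary-unary call
# through the stub defined below, bypassing the GAPIC layer entirely. The
# emulator address and project ID are placeholders; an insecure channel is
# only appropriate against the local Datastore emulator.)
import grpc

from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import datastore_pb2_grpc

channel = grpc.insecure_channel('localhost:8081')  # local emulator, no TLS/auth
stub = datastore_pb2_grpc.DatastoreStub(channel)
request = datastore_pb2.LookupRequest(project_id='my-project')
response = stub.Lookup(request)  # LookupResponse with found/missing/deferred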
import grpc -import google.cloud.datastore_v1.proto.datastore_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2 class DatastoreStub(object): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py index b196fc6879cb..c92eaf1848b5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py @@ -26,7 +26,6 @@ serialized_pb=_b('\n,google/cloud/datastore_v1/proto/entity.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xb7\x01\n\x03Key\x12\x36\n\x0cpartition_id\x18\x01 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x32\n\x04path\x18\x02 \x03(\x0b\x32$.google.datastore.v1.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"8\n\nArrayValue\x12*\n\x06values\x18\x01 \x03(\x0b\x32\x1a.google.datastore.v1.Value\"\xf1\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\tkey_value\x18\x05 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x33\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12\x36\n\x0b\x61rray_value\x18\t \x01(\x0b\x32\x1f.google.datastore.v1.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xbf\x01\n\x06\x45ntity\x12%\n\x03key\x18\x01 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12?\n\nproperties\x18\x03 \x03(\x0b\x32+.google.datastore.v1.Entity.PropertiesEntry\x1aM\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Value:\x02\x38\x01\x42\x9e\x01\n\x17\x63om.google.datastore.v1B\x0b\x45ntityProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 
\x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z Date: Wed, 10 Oct 2018 11:04:44 -0700 Subject: [PATCH 187/611] Use new Nox (#6175) --- .../{nox.py => noxfile.py} | 47 +++++-------------- 1 file changed, 12 insertions(+), 35 deletions(-) rename packages/google-cloud-datastore/{nox.py => noxfile.py} (83%) diff --git a/packages/google-cloud-datastore/nox.py b/packages/google-cloud-datastore/noxfile.py similarity index 83% rename from packages/google-cloud-datastore/nox.py rename to packages/google-cloud-datastore/noxfile.py index 79d04e1f16ba..39b4eef9c890 100644 --- a/packages/google-cloud-datastore/nox.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -25,7 +25,6 @@ ) -@nox.session def default(session): """Default unit test session. @@ -55,35 +54,20 @@ def default(session): ) -@nox.session -@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) -def unit(session, py): +@nox.session(python=['2.7', '3.5', '3.6', '3.7']) +def unit(session): """Run the unit test suite.""" - - # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + py - default(session) -@nox.session -@nox.parametrize('py', ['2.7', '3.6']) -def system(session, py): +@nox.session(python=['2.7', '3.6']) +def system(session): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') - # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'sys-' + py - # Use pre-release gRPC for system tests. 
    session.install('--pre', 'grpcio')

@@ -98,19 +82,18 @@ def system(session, py):
     session.run('py.test', '--quiet', 'tests/system', *session.posargs)


-@nox.session
+@nox.session(python='3.6')
 def doctests(session):
     """Run the system test suite."""
+    # Doctests run against Python 3.6 only.
+    # It is difficult to make doctests run against both Python 2 and Python 3
+    # because they test string output equivalence, which is difficult to
+    # make match (e.g. unicode literals starting with "u").

     # Sanity check: Only run system tests if the environment variable is set.
     if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
         session.skip('Credentials must be set via environment variable.')

-    # Doctests run against Python 3.6 only.
-    # It is difficult to make doctests run against both Python 2 and Python 3
-    # because they test string output equivalence, which is difficult to
-    # make match (e.g. unicode literals starting with "u").
-    session.interpreter = 'python3.6'

     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
@@ -124,40 +107,34 @@ def doctests(session):
     session.run('py.test', '--quiet', 'tests/doctests.py')


-@nox.session
+@nox.session(python='3.6')
 def lint(session):
     """Run linters.

     Returns a failure if the linters find linting errors or sufficiently
     serious code quality issues.
     """
-    session.interpreter = 'python3.6'
     session.install('flake8', *LOCAL_DEPS)
     session.install('.')
     session.run('flake8', 'google', 'tests')


-@nox.session
+@nox.session(python='3.6')
 def lint_setup_py(session):
     """Verify that setup.py is valid (including RST check)."""
-    session.interpreter = 'python3.6'
-
-    # Set the virtualenv dirname.
-    session.virtualenv_dirname = 'setup'

     session.install('docutils', 'Pygments')
     session.run(
         'python', 'setup.py', 'check', '--restructuredtext', '--strict')


-@nox.session
+@nox.session(python='3.6')
 def cover(session):
     """Run the final coverage report.

     This outputs the coverage report aggregating coverage from the unit
     test runs (not system test runs), and then erases coverage data.
     """
-    session.interpreter = 'python3.6'
     session.install('coverage', 'pytest-cov')
     session.run('coverage', 'report', '--show-missing', '--fail-under=100')
     session.run('coverage', 'erase')

From af33ac6b71d04985f862068aba6e4aadf60a7561 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 17 Oct 2018 12:51:09 -0400
Subject: [PATCH 188/611] Docs: normalize use of support level badges (#6159)

* Remove badges for deprecated umbrella 'google-cloud' package.

* Clarify support levels.

- Add explicit section to support linking from sub-package README badges.
- Move explanatory text for a support level above the list of packages
  at that level.

* Normalize use of support-level badges in READMEs.

- Note that 'error_reporting/README.rst' and 'monitoring/README.rst' are
  undergoing other edits; they are left out here to avoid conflicts.

* Use 'General Availability' for support level. Fix links in related
  API READMEs.

* Fix links for alpha support in API READMEs.

* Fix links for beta support in API READMEs.
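An aside on the Nox migration in patch 187 above: under the new Nox, the
interpreter list moves out of `@nox.parametrize` and `session.interpreter`
and into the `@nox.session` decorator itself, and virtualenv directory names
are derived automatically from the session name. A minimal sketch (the
session body here is illustrative, not copied from the repository):

import nox

@nox.session(python=['2.7', '3.6'])
def unit(session):
    """Run unit tests on each configured interpreter."""
    session.install('pytest')
    session.install('-e', '.')
    session.run('py.test', 'tests/unit', *session.posargs)

# Each interpreter becomes its own named session, selectable as, e.g.:
#   nox --session "unit-3.6"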
--- packages/google-cloud-datastore/README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 869bd2d3e176..575585f756e9 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Datastore ======================================== -|pypi| |versions| +|GA| |pypi| |versions| `Google Cloud Datastore API`_ is a fully managed, schemaless database for storing non-relational data. Cloud Datastore automatically scales with your @@ -12,6 +12,8 @@ all other queries. - `Client Library Documentation`_ - `Product Documentation`_ +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg From 8383a99ad57f5c231f112ed5e60d846020b07340 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 26 Oct 2018 16:26:54 -0700 Subject: [PATCH 189/611] Datastore: propagate empty arrays in entity values (#6285) * Fix #6284 by handling empty arrays in entity to pb --- .../google/cloud/datastore/helpers.py | 14 ++++++++------ .../tests/system/test_system.py | 12 ++++++++++++ .../tests/unit/test_batch.py | 6 +++--- .../tests/unit/test_helpers.py | 6 +++++- 4 files changed, 28 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 964eb2a37204..38c4b0ddfdf6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -212,8 +212,6 @@ def entity_to_protobuf(entity): for name, value in entity.items(): value_is_list = isinstance(value, list) - if value_is_list and len(value) == 0: - continue value_pb = _new_value_pb(entity_pb, name) # Set the appropriate value. @@ -453,10 +451,14 @@ def _set_protobuf_value(value_pb, val): entity_pb = entity_to_protobuf(val) value_pb.entity_value.CopyFrom(entity_pb) elif attr == 'array_value': - l_pb = value_pb.array_value.values - for item in val: - i_pb = l_pb.add() - _set_protobuf_value(i_pb, item) + if len(val) == 0: + array_value = entity_pb2.ArrayValue(values=[]) + value_pb.array_value.CopyFrom(array_value) + else: + l_pb = value_pb.array_value.values + for item in val: + i_pb = l_pb.add() + _set_protobuf_value(i_pb, item) elif attr == 'geo_point_value': value_pb.geo_point_value.CopyFrom(val) else: # scalar, just assign diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index c73b2a80cc17..2a1080b628e5 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -547,3 +547,15 @@ def test_failure_with_contention(self): # transaction. 
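# (Editor's sketch, not part of the patch: with the helpers.py change above,
# an empty list now survives the round trip through entity_to_protobuf /
# entity_from_protobuf instead of being silently dropped. The property name
# 'children' is arbitrary.)
from google.cloud.datastore.entity import Entity
from google.cloud.datastore.helpers import entity_from_protobuf
from google.cloud.datastore.helpers import entity_to_protobuf

entity = Entity()
entity['children'] = []
pb = entity_to_protobuf(entity)
assert dict(entity_from_protobuf(pb)) == {'children': []}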
entity_in_txn[contention_prop_name] = u'inside' txn.put(entity_in_txn) + + def test_empty_array_put(self): + local_client = clone_client(Config.CLIENT) + + key = local_client.key('EmptyArray', 1234) + local_client = datastore.Client() + entity = datastore.Entity(key=key) + entity['children'] = [] + local_client.put(entity) + retrieved = local_client.get(entity.key) + + self.assertEqual(entity['children'], retrieved['children']) diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index 9b854141e553..4b94b657e9d0 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -119,7 +119,7 @@ def test_put_entity_w_completed_key(self): 'foo': 'bar', 'baz': 'qux', 'spam': [1, 2, 3], - 'frotz': [], # will be ignored + 'frotz': [], } client = _Client(project) batch = self._make_one(client) @@ -134,7 +134,7 @@ def test_put_entity_w_completed_key(self): self.assertEqual(mutated_entity.key, key._key) prop_dict = dict(_property_tuples(mutated_entity)) - self.assertEqual(len(prop_dict), 3) + self.assertEqual(len(prop_dict), 4) self.assertFalse(prop_dict['foo'].exclude_from_indexes) self.assertTrue(prop_dict['baz'].exclude_from_indexes) self.assertFalse(prop_dict['spam'].exclude_from_indexes) @@ -142,7 +142,7 @@ def test_put_entity_w_completed_key(self): self.assertTrue(spam_values[0].exclude_from_indexes) self.assertTrue(spam_values[1].exclude_from_indexes) self.assertTrue(spam_values[2].exclude_from_indexes) - self.assertFalse('frotz' in prop_dict) + self.assertTrue('frotz' in prop_dict) def test_delete_wrong_status(self): project = 'PROJECT' diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 91a693455b7d..f4338812be54 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -294,7 +294,11 @@ def test_with_empty_list(self): entity['foo'] = [] entity_pb = self._call_fut(entity) - self._compare_entity_proto(entity_pb, entity_pb2.Entity()) + expected_pb = entity_pb2.Entity() + prop = expected_pb.properties.get_or_create('foo') + prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[])) + + self._compare_entity_proto(entity_pb, expected_pb) def test_inverts_to_protobuf(self): from google.cloud.datastore_v1.proto import entity_pb2 From 2973e425299d0e586a83a4bf31d7fb53cfa4693e Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 29 Oct 2018 11:21:19 -0700 Subject: [PATCH 190/611] Release datastore 1.7.1 (#6325) --- packages/google-cloud-datastore/CHANGELOG.md | 19 +++++++++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 1c7a1494ae44..57b311b9af96 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.7.1 + +10-29-2018 10:38 PDT + +### Implementation Changes +- Propagate empty arrays in entity values. ([#6285](https://github.com/googleapis/google-cloud-python/pull/6285)) +- Expose 'Client.base_url' property to allow alternate endpoints. 
([#5821](https://github.com/googleapis/google-cloud-python/pull/5821)) + +### Documentation +- Normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Redirect renamed 'usage.html'/'client.html' -> 'index.html'. ([#5996](https://github.com/googleapis/google-cloud-python/pull/5996)) +- Replace links to '/stable/' with '/latest/'. ([#5901](https://github.com/googleapis/google-cloud-python/pull/5901)) + +### Internal / Testing Changes +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) +- Add 'synth.py'. ([#6078](https://github.com/googleapis/google-cloud-python/pull/6078)) +- Prep datastore docs for repo split. ([#5919](https://github.com/googleapis/google-cloud-python/pull/5919)) +- Use inplace installs under `nox` ([#5865](https://github.com/googleapis/google-cloud-python/pull/5865)) + ## 1.7.0 ### Implementation Changes diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index d53e4fbdc4ad..1804121dd36f 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-datastore' description = 'Google Cloud Datastore API client library' -version = '1.7.0' +version = '1.7.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 9e8a7ed4efcee2f30f1e1dca3666dc08694bb1ac Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 6 Nov 2018 08:11:54 -0800 Subject: [PATCH 191/611] Fix client_info bug, update docstrings. (#6409) --- .../datastore_v1/gapic/datastore_client.py | 57 +++++++++++-------- .../google/cloud/datastore_v1/gapic/enums.py | 22 +++---- 2 files changed, 46 insertions(+), 33 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 404fc0c8a8b8..4da4d60425b4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -145,9 +145,10 @@ def __init__(self, ) if client_info is None: - client_info = ( - google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC @@ -179,10 +180,10 @@ def lookup(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize ``keys``: + >>> # TODO: Initialize `keys`: >>> keys = [] >>> >>> response = client.lookup(project_id, keys) @@ -190,9 +191,11 @@ def lookup(self, Args: project_id (str): The ID of the project against which to make the request. keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Keys of entities to look up. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Key` read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this lookup request. 
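# (Editor's aside, not part of the patch: the client_info fix above matters
# because mutating the shared default ClientInfo in place would leak this
# library's gapic_version into every other GAPIC client in the process.
# Callers can still supply their own metadata; the version string below is a
# placeholder.)
from google.api_core.gapic_v1.client_info import ClientInfo
from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient(
    client_info=ClientInfo(client_library_version='1.7.1'))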
+ If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.ReadOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -249,10 +252,10 @@ def run_query(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize ``partition_id``: + >>> # TODO: Initialize `partition_id`: >>> partition_id = {} >>> >>> response = client.run_query(project_id, partition_id) @@ -263,15 +266,19 @@ def run_query(self, Queries are scoped to a single partition. This partition ID is normalized with the standard default context partition ID. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.PartitionId` read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this query. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.ReadOptions` query (Union[dict, ~google.cloud.datastore_v1.types.Query]): The query to run. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Query` gql_query (Union[dict, ~google.cloud.datastore_v1.types.GqlQuery]): The GQL query to run. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.GqlQuery` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -334,7 +341,7 @@ def begin_transaction(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> response = client.begin_transaction(project_id) @@ -342,6 +349,7 @@ def begin_transaction(self, Args: project_id (str): The ID of the project against which to make the request. transaction_options (Union[dict, ~google.cloud.datastore_v1.types.TransactionOptions]): Options for a new transaction. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.TransactionOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -400,13 +408,13 @@ def commit(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize ``mode``: + >>> # TODO: Initialize `mode`: >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED >>> - >>> # TODO: Initialize ``mutations``: + >>> # TODO: Initialize `mutations`: >>> mutations = [] >>> >>> response = client.commit(project_id, mode, mutations) @@ -417,16 +425,17 @@ def commit(self, mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a single entity are - applied in order. The following sequences of mutations affecting a single - entity are not permitted in a single ``Commit`` request: + applied in order. 
The following sequences of mutations affecting a + single entity are not permitted in a single ``Commit`` request: - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single entity. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Mutation` transaction (bytes): The identifier of the transaction associated with the commit. A @@ -488,10 +497,10 @@ def rollback(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize ``transaction``: + >>> # TODO: Initialize `transaction`: >>> transaction = b'' >>> >>> response = client.rollback(project_id, transaction) @@ -551,10 +560,10 @@ def allocate_ids(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize ``keys``: + >>> # TODO: Initialize `keys`: >>> keys = [] >>> >>> response = client.allocate_ids(project_id, keys) @@ -563,6 +572,7 @@ def allocate_ids(self, project_id (str): The ID of the project against which to make the request. keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Key` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -618,10 +628,10 @@ def reserve_ids(self, >>> >>> client = datastore_v1.DatastoreClient() >>> - >>> # TODO: Initialize ``project_id``: + >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize ``keys``: + >>> # TODO: Initialize `keys`: >>> keys = [] >>> >>> response = client.reserve_ids(project_id, keys) @@ -630,6 +640,7 @@ def reserve_ids(self, project_id (str): The ID of the project against which to make the request. keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with complete key paths whose numeric IDs should not be auto-allocated. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Key` database_id (str): If not empty, the ID of the database against which to make the request. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 299a20ff87af..41497f71781b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -20,10 +20,10 @@ class NullValue(enum.IntEnum): """ - ``NullValue`` is a singleton enumeration to represent the null value for the - ``Value`` type union. + ``NullValue`` is a singleton enumeration to represent the null value for + the ``Value`` type union. - The JSON representation for ``NullValue`` is JSON ``null``. + The JSON representation for ``NullValue`` is JSON ``null``. Attributes: NULL_VALUE (int): Null value. 
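# (Editor's sketch, not part of the diff: now that these enum classes derive
# from enum.IntEnum, per the change earlier in this series, the wire integers
# gain symbolic names in both directions while remaining plain ints.)
from google.cloud.datastore_v1.gapic import enums

mode = enums.CommitRequest.Mode.NON_TRANSACTIONAL
assert mode == 2                             # still interchangeable with the raw int
assert enums.CommitRequest.Mode(2) is mode   # and recoverable from the raw int
print(mode.name)                             # 'NON_TRANSACTIONAL'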
@@ -34,11 +34,12 @@ class NullValue(enum.IntEnum): class EntityResult(object): class ResultType(enum.IntEnum): """ - Specifies what data the 'entity' field contains. - A ``ResultType`` is either implied (for example, in ``LookupResponse.missing`` - from ``datastore.proto``, it is always ``KEY_ONLY``) or specified by context - (for example, in message ``QueryResultBatch``, field ``entity_result_type`` - specifies a ``ResultType`` for all the values in field ``entity_results``). + Specifies what data the 'entity' field contains. A ``ResultType`` is + either implied (for example, in ``LookupResponse.missing`` from + ``datastore.proto``, it is always ``KEY_ONLY``) or specified by context + (for example, in message ``QueryResultBatch``, field + ``entity_result_type`` specifies a ``ResultType`` for all the values in + field ``entity_results``). Attributes: RESULT_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. @@ -130,8 +131,9 @@ class Mode(enum.IntEnum): Attributes: MODE_UNSPECIFIED (int): Unspecified. This value must not be used. - TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are applied. - Learn about transactions `here `_. + TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are + applied. Learn about transactions + `here `__. NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. """ MODE_UNSPECIFIED = 0 From c1ec820409bc49f1ef3e8a840a4ad5640eb1df25 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Nov 2018 14:03:35 -0500 Subject: [PATCH 192/611] Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. (#6391) Closes #6390. --- packages/google-cloud-datastore/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 1804121dd36f..7cf1a2491e6d 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,8 +29,8 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-cloud-core<0.29dev,>=0.28.0', - 'google-api-core[grpc]<2.0.0dev,>=1.0.0', + 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', + 'google-cloud-core >=0.28.0, <0.29dev', ] extras = { } From dffa24fde6152ec675a475fcc3aa5346ab5ab90e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 14 Nov 2018 10:27:28 -0800 Subject: [PATCH 193/611] Pick up fixes in GAPIC generator. 
(#6494) Includes fixes from these PRs: - https://github.com/googleapis/gapic-generator/pull/2407 - https://github.com/googleapis/gapic-generator/pull/2396 --- .../datastore_v1/gapic/datastore_client.py | 17 +++-- .../google/cloud/datastore_v1/gapic/enums.py | 69 ++++++++----------- .../transports/datastore_grpc_transport.py | 11 +++ 3 files changed, 51 insertions(+), 46 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 4da4d60425b4..d1ebd3c925bf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -80,7 +80,7 @@ def __init__(self, transport=None, channel=None, credentials=None, - client_config=datastore_client_config.config, + client_config=None, client_info=None): """Constructor. @@ -113,13 +113,20 @@ def __init__(self, your own client library. """ # Raise deprecation warnings for things we want to go away. - if client_config: - warnings.warn('The `client_config` argument is deprecated.', - PendingDeprecationWarning) + if client_config is not None: + warnings.warn( + 'The `client_config` argument is deprecated.', + PendingDeprecationWarning, + stacklevel=2) + else: + client_config = datastore_client_config.config + if channel: warnings.warn( 'The `channel` argument is deprecated; use ' - '`transport` instead.', PendingDeprecationWarning) + '`transport` instead.', + PendingDeprecationWarning, + stacklevel=2) # Instantiate the transport. # The transport is responsible for handling serialization and diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 41497f71781b..7aa90c136e1c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -18,17 +18,36 @@ import enum -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. +class CommitRequest(object): + class Mode(enum.IntEnum): + """ + The modes available for commits. + + Attributes: + MODE_UNSPECIFIED (int): Unspecified. This value must not be used. + TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are + applied. Learn about transactions + `here `__. + NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. + """ + MODE_UNSPECIFIED = 0 + TRANSACTIONAL = 1 + NON_TRANSACTIONAL = 2 - The JSON representation for ``NullValue`` is JSON ``null``. - Attributes: - NULL_VALUE (int): Null value. - """ - NULL_VALUE = 0 +class ReadOptions(object): + class ReadConsistency(enum.IntEnum): + """ + The possible values for read consistencies. + + Attributes: + READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. + STRONG (int): Strong consistency. + EVENTUAL (int): Eventual consistency. + """ + READ_CONSISTENCY_UNSPECIFIED = 0 + STRONG = 1 + EVENTUAL = 2 class EntityResult(object): @@ -122,35 +141,3 @@ class MoreResultsType(enum.IntEnum): MORE_RESULTS_AFTER_LIMIT = 2 MORE_RESULTS_AFTER_CURSOR = 4 NO_MORE_RESULTS = 3 - - -class CommitRequest(object): - class Mode(enum.IntEnum): - """ - The modes available for commits. - - Attributes: - MODE_UNSPECIFIED (int): Unspecified. 
This value must not be used. - TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are - applied. Learn about transactions - `here `__. - NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. - """ - MODE_UNSPECIFIED = 0 - TRANSACTIONAL = 1 - NON_TRANSACTIONAL = 2 - - -class ReadOptions(object): - class ReadConsistency(enum.IntEnum): - """ - The possible values for read consistencies. - - Attributes: - READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. - STRONG (int): Strong consistency. - EVENTUAL (int): Eventual consistency. - """ - READ_CONSISTENCY_UNSPECIFIED = 0 - STRONG = 1 - EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index af1db245878b..3f0e8ef0cd13 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -65,6 +65,8 @@ def __init__(self, credentials=credentials, ) + self._channel = channel + # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { @@ -94,6 +96,15 @@ def create_channel(cls, scopes=cls._OAUTH_SCOPES, ) + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + @property def lookup(self): """Return the gRPC stub for {$apiMethod.name}. From 97d273ea050fe9ee4d8d2f456cd8035d7c78ebc6 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 19 Nov 2018 11:25:13 -0800 Subject: [PATCH 194/611] Add synth metadata. (#6564) --- .../google-cloud-datastore/synth.metadata | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 packages/google-cloud-datastore/synth.metadata diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata new file mode 100644 index 000000000000..f26e69af8847 --- /dev/null +++ b/packages/google-cloud-datastore/synth.metadata @@ -0,0 +1,27 @@ +{ + "sources": [ + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "5a57f0c13a358b2b15452bf2d67453774a5f6d4f", + "internalRef": "221837528" + } + }, + { + "git": { + "name": "googleapis-private", + "remote": "https://github.com/googleapis/googleapis-private.git", + "sha": "6aa8e1a447bb8d0367150356a28cb4d3f2332641", + "internalRef": "221340946" + } + }, + { + "generator": { + "name": "artman", + "version": "0.16.0", + "dockerImage": "googleapis/artman@sha256:90f9d15e9bad675aeecd586725bce48f5667ffe7d5fc4d1e96d51ff34304815b" + } + } + ] +} \ No newline at end of file From 5fc8da59cb4bb9b0a23d21782d469e0dc97bb11e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 21 Nov 2018 07:25:37 -0800 Subject: [PATCH 195/611] Pick up enum fixes in the GAPIC generator. 
(#6610) --- .../google/cloud/datastore_v1/gapic/enums.py | 69 +++++++++++-------- 1 file changed, 41 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 7aa90c136e1c..41497f71781b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -18,36 +18,17 @@ import enum -class CommitRequest(object): - class Mode(enum.IntEnum): - """ - The modes available for commits. - - Attributes: - MODE_UNSPECIFIED (int): Unspecified. This value must not be used. - TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are - applied. Learn about transactions - `here `__. - NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. - """ - MODE_UNSPECIFIED = 0 - TRANSACTIONAL = 1 - NON_TRANSACTIONAL = 2 +class NullValue(enum.IntEnum): + """ + ``NullValue`` is a singleton enumeration to represent the null value for + the ``Value`` type union. + The JSON representation for ``NullValue`` is JSON ``null``. -class ReadOptions(object): - class ReadConsistency(enum.IntEnum): - """ - The possible values for read consistencies. - - Attributes: - READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. - STRONG (int): Strong consistency. - EVENTUAL (int): Eventual consistency. - """ - READ_CONSISTENCY_UNSPECIFIED = 0 - STRONG = 1 - EVENTUAL = 2 + Attributes: + NULL_VALUE (int): Null value. + """ + NULL_VALUE = 0 class EntityResult(object): @@ -141,3 +122,35 @@ class MoreResultsType(enum.IntEnum): MORE_RESULTS_AFTER_LIMIT = 2 MORE_RESULTS_AFTER_CURSOR = 4 NO_MORE_RESULTS = 3 + + +class CommitRequest(object): + class Mode(enum.IntEnum): + """ + The modes available for commits. + + Attributes: + MODE_UNSPECIFIED (int): Unspecified. This value must not be used. + TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are + applied. Learn about transactions + `here `__. + NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. + """ + MODE_UNSPECIFIED = 0 + TRANSACTIONAL = 1 + NON_TRANSACTIONAL = 2 + + +class ReadOptions(object): + class ReadConsistency(enum.IntEnum): + """ + The possible values for read consistencies. + + Attributes: + READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. + STRONG (int): Strong consistency. + EVENTUAL (int): Eventual consistency. + """ + READ_CONSISTENCY_UNSPECIFIED = 0 + STRONG = 1 + EVENTUAL = 2 From 312bb416c263239759ca16a0578488c6a7a04436 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 28 Nov 2018 13:55:23 -0800 Subject: [PATCH 196/611] Add templates for flake8, coveragerc, noxfile, and black. 
(#6642) --- packages/google-cloud-datastore/.coveragerc | 9 +- packages/google-cloud-datastore/.flake8 | 1 + packages/google-cloud-datastore/MANIFEST.in | 3 +- packages/google-cloud-datastore/noxfile.py | 176 ++++++++++---------- packages/google-cloud-datastore/synth.py | 27 +-- 5 files changed, 115 insertions(+), 101 deletions(-) diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index 1596e4637d3f..51fec440cebf 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -2,8 +2,6 @@ branch = True [report] -omit = - _app_engine_key_pb2.py fail_under = 100 show_missing = True exclude_lines = @@ -11,3 +9,10 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */google-cloud-python/core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 1f44a90f8195..61766fa84d02 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,4 +1,5 @@ [flake8] +ignore = E203, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index fc77f8c82ff0..9cbf175afe6b 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -1,4 +1,5 @@ include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * -global-exclude *.pyc __pycache__ +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 39b4eef9c890..a9efc0e344ce 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -1,10 +1,12 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,128 +15,126 @@ # limitations under the License. from __future__ import absolute_import - import os import nox -LOCAL_DEPS = ( - os.path.join('..', 'api_core'), - os.path.join('..', 'core'), -) +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +@nox.session(python="3.7") +def blacken(session): + """Run black. -def default(session): - """Default unit test session. + Format code to uniform standard. + """ + session.install("black") + session.run( + "black", + "google", + "tests", + "docs", + "--exclude", + ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", + ) - This is intended to be run **without** an interpreter set, so - that the current ``python`` (on the ``PATH``) or the version of - Python corresponding to the ``nox`` binary the ``PATH`` can - run the tests. + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. """ - # Install all test dependencies, then install local packages in-place. 
- session.install('mock', 'pytest', 'pytest-cov') + session.install("flake8", "black", *LOCAL_DEPS) + session.run( + "black", + "--check", + "google", + "tests", + "docs", + "--exclude", + ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", + ) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + session.install("mock", "pytest", "pytest-cov") for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - session.install('-e', '.') + session.install("-e", local_dep) + session.install("-e", ".") # Run py.test against the unit tests. session.run( - 'py.test', - '--quiet', - '--cov=google.cloud.datastore', - '--cov=tests.unit', - '--cov-append', - '--cov-config=.coveragerc', - '--cov-report=', - '--cov-fail-under=97', - os.path.join('tests', 'unit'), - *session.posargs + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=97", + os.path.join("tests", "unit"), + *session.posargs, ) -@nox.session(python=['2.7', '3.5', '3.6', '3.7']) +@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=['2.7', '3.6']) +@nox.session(python=["2.7", "3.7"]) def system(session): """Run the system test suite.""" - - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - session.skip('Credentials must be set via environment variable.') + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") # Use pre-release gRPC for system tests. - session.install('--pre', 'grpcio') - - # Install all test dependencies, then install local packages in-place. - session.install('mock', 'pytest') - for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - session.install('-e', '../test_utils/') - session.install('-e', '.') - - # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/system', *session.posargs) - - -@nox.session(python='3.6') -def doctests(session): - """Run the system test suite.""" - # Doctests run against Python 3.6 only. - # It is difficult to make doctests run against both Python 2 and Python 3 - # because they test string output equivalence, which is difficult to - # make match (e.g. unicode literals starting with "u"). - - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - session.skip('Credentials must be set via environment variable.') - + session.install("--pre", "grpcio") # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
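# The templated sessions above standardize the local workflow across
# packages; a possible invocation pattern, assuming nox is installed and
# the pinned interpreters are available (session names come from this
# noxfile, everything else is illustrative):
#
#   nox -s blacken             # reformat google/, tests/, docs/ with black
#   nox -s lint lint_setup_py  # flake8 + black --check, then setup.py check
#   nox -s unit-3.7            # unit tests (also unit-2.7, -3.5, -3.6)
#   nox -s system-3.7          # system tests; skipped unless
#                              #   GOOGLE_APPLICATION_CREDENTIALS is set
#   nox -s cover               # aggregate coverage report, fails under 100%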
- session.install('mock', 'pytest', 'sphinx') + session.install("mock", "pytest") for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - session.install('-e', '../test_utils/') - session.install('-e', '.') + session.install("-e", local_dep) + session.install("-e", "../test_utils/") + session.install("-e", ".") # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/doctests.py') - + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python='3.6') -def lint(session): - """Run linters. - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install('flake8', *LOCAL_DEPS) - session.install('.') - session.run('flake8', 'google', 'tests') - - -@nox.session(python='3.6') -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install('docutils', 'Pygments') - session.run( - 'python', 'setup.py', 'check', '--restructuredtext', '--strict') - - -@nox.session(python='3.6') +@nox.session(python="3.7") def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. """ - session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 37319fb0859f..efd03dc21547 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -17,16 +17,23 @@ from synthtool import gcp gapic = gcp.GAPICGenerator() +common = gcp.CommonTemplates() - -#---------------------------------------------------------------------------- -# Generate datastore client -#---------------------------------------------------------------------------- +# ---------------------------------------------------------------------------- +# Generate datastore GAPIC layer +# ---------------------------------------------------------------------------- library = gapic.py_library( - 'datastore', - 'v1', - config_path='/google/datastore/artman_datastore.yaml', - artman_output_name='datastore-v1') + "datastore", + "v1", + config_path="/google/datastore/artman_datastore.yaml", + artman_output_name="datastore-v1", +) + +s.move(library / "google/cloud/datastore_v1/proto") +s.move(library / "google/cloud/datastore_v1/gapic") -s.move(library / 'google/cloud/datastore_v1/proto') -s.move(library / 'google/cloud/datastore_v1/gapic') +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library(unit_cov_level=97, cov_level=100) +s.move(templated_files) From fea131358feac53871f632fd6124d74ae66c778e Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 28 Nov 2018 14:28:56 -0800 Subject: [PATCH 197/611] Run Black on Generated libraries (#6666) * blacken appveyor * Blacken all gen'd libraries not under PR --- packages/google-cloud-datastore/docs/conf.py | 173 +++---- 
.../google-cloud-datastore/google/__init__.py | 2 + .../google/cloud/__init__.py | 2 + .../google/cloud/datastore/__init__.py | 6 +- .../cloud/datastore/_app_engine_key_pb2.py | 375 +++++++++------ .../google/cloud/datastore/_gapic.py | 11 +- .../google/cloud/datastore/_http.py | 104 +++-- .../google/cloud/datastore/batch.py | 21 +- .../google/cloud/datastore/client.py | 110 +++-- .../google/cloud/datastore/entity.py | 23 +- .../google/cloud/datastore/helpers.py | 120 ++--- .../google/cloud/datastore/key.py | 131 +++--- .../google/cloud/datastore/query.py | 118 +++-- .../google/cloud/datastore/transaction.py | 8 +- .../google/cloud/datastore_v1/__init__.py | 6 +- .../datastore_v1/gapic/datastore_client.py | 346 +++++++------- .../gapic/datastore_client_config.py | 22 +- .../google/cloud/datastore_v1/gapic/enums.py | 8 + .../transports/datastore_grpc_transport.py | 47 +- .../google/cloud/datastore_v1/types.py | 8 +- .../google-cloud-datastore/tests/doctests.py | 38 +- .../tests/system/test_system.py | 213 +++++---- .../tests/system/utils/clear_datastore.py | 31 +- .../tests/system/utils/populate_datastore.py | 106 ++--- .../unit/gapic/v1/test_datastore_client_v1.py | 73 ++- .../tests/unit/test__gapic.py | 46 +- .../tests/unit/test__http.py | 396 ++++++++-------- .../tests/unit/test_batch.py | 148 +++--- .../tests/unit/test_client.py | 437 ++++++++---------- .../tests/unit/test_entity.py | 71 +-- .../tests/unit/test_helpers.py | 343 ++++++-------- .../tests/unit/test_key.py | 431 +++++++++-------- .../tests/unit/test_query.py | 362 +++++++-------- .../tests/unit/test_transaction.py | 66 ++- 34 files changed, 2217 insertions(+), 2185 deletions(-) diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index fd43beccaf91..a24a380e1afd 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -18,57 +18,55 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -__version__ = '0.90.4' +__version__ = "0.90.4" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.coverage', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", ] # autodoc/autosummary flags -autoclass_content = 'both' -autodoc_default_flags = ['members'] +autoclass_content = "both" +autodoc_default_flags = ["members"] autosummary_generate = True # Add any paths that contain templates here, relative to this directory. 
-templates_path = ['_templates'] +templates_path = ["_templates"] # Allow markdown includes (so releases.md can include CHANGLEOG.md) # http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = { - '.md': 'recommonmark.parser.CommonMarkParser', -} +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'google-cloud-datastore' -copyright = u'2017, Google' -author = u'Google APIs' +project = u"google-cloud-datastore" +copyright = u"2017, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -77,7 +75,7 @@ # The full version, including alpha/beta/rc tags. release = __version__ # The short X.Y version. -version = '.'.join(release.split('.')[0:2]) +version = ".".join(release.split(".")[0:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -88,37 +86,37 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -127,31 +125,31 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". 
-#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -161,78 +159,75 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
-htmlhelp_basename = 'google-cloud-datastore-doc' +htmlhelp_basename = "google-cloud-datastore-doc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. #'preamble': '', - # Latex figure (float) alignment #'figure_align': 'htbp', } @@ -241,39 +236,51 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'google-cloud-datastore.tex', - u'google-cloud-datastore Documentation', author, 'manual'), + ( + master_doc, + "google-cloud-datastore.tex", + u"google-cloud-datastore Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'google-cloud-datastore', - u'google-cloud-datastore Documentation', [author], 1)] +man_pages = [ + ( + master_doc, + "google-cloud-datastore", + u"google-cloud-datastore Documentation", + [author], + 1, + ) +] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -281,27 +288,33 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'google-cloud-datastore', - u'google-cloud-datastore Documentation', author, 'google-cloud-datastore', - 'GAPIC library for the {metadata.shortName} v1 service', 'APIs'), + ( + master_doc, + "google-cloud-datastore", + u"google-cloud-datastore Documentation", + author, + "google-cloud-datastore", + "GAPIC library for the {metadata.shortName} v1 service", + "APIs", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'python': ('http://python.readthedocs.org/en/latest/', None), - 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), } # Napoleon settings diff --git a/packages/google-cloud-datastore/google/__init__.py b/packages/google-cloud-datastore/google/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/packages/google-cloud-datastore/google/__init__.py +++ b/packages/google-cloud-datastore/google/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-datastore/google/cloud/__init__.py b/packages/google-cloud-datastore/google/cloud/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/packages/google-cloud-datastore/google/cloud/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 8ceee43f0f74..078180f57128 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -56,7 +56,8 @@ from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-datastore').version + +__version__ = get_distribution("google-cloud-datastore").version from google.cloud.datastore.batch import Batch from google.cloud.datastore.client import Client @@ -65,5 +66,4 @@ from google.cloud.datastore.query import Query from google.cloud.datastore.transaction import Transaction -__all__ = ['__version__', 'Batch', 'Client', 'Entity', 'Key', 'Query', - 'Transaction'] +__all__ = ["__version__", "Batch", "Client", "Entity", "Key", "Query", "Transaction"] diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py index bbb1c75b80df..7fcd8fb181a4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py @@ -2,181 +2,262 @@ # source: _app_engine_key.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() - - DESCRIPTOR = _descriptor.FileDescriptor( - name='_app_engine_key.proto', - package='', - syntax='proto2', - serialized_pb=_b('\n\x15_app_engine_key.proto\"V\n\tReference\x12\x0b\n\x03\x61pp\x18\r \x02(\t\x12\x12\n\nname_space\x18\x14 \x01(\t\x12\x13\n\x04path\x18\x0e \x02(\x0b\x32\x05.Path\x12\x13\n\x0b\x64\x61tabase_id\x18\x17 \x01(\t\"Y\n\x04Path\x12\x1e\n\x07\x65lement\x18\x01 \x03(\n2\r.Path.Element\x1a\x31\n\x07\x45lement\x12\x0c\n\x04type\x18\x02 
\x02(\t\x12\n\n\x02id\x18\x03 \x01(\x03\x12\x0c\n\x04name\x18\x04 \x01(\t') + name="_app_engine_key.proto", + package="", + syntax="proto2", + serialized_pb=_b( + '\n\x15_app_engine_key.proto"V\n\tReference\x12\x0b\n\x03\x61pp\x18\r \x02(\t\x12\x12\n\nname_space\x18\x14 \x01(\t\x12\x13\n\x04path\x18\x0e \x02(\x0b\x32\x05.Path\x12\x13\n\x0b\x64\x61tabase_id\x18\x17 \x01(\t"Y\n\x04Path\x12\x1e\n\x07\x65lement\x18\x01 \x03(\n2\r.Path.Element\x1a\x31\n\x07\x45lement\x12\x0c\n\x04type\x18\x02 \x02(\t\x12\n\n\x02id\x18\x03 \x01(\x03\x12\x0c\n\x04name\x18\x04 \x01(\t' + ), ) _sym_db.RegisterFileDescriptor(DESCRIPTOR) - - _REFERENCE = _descriptor.Descriptor( - name='Reference', - full_name='Reference', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='app', full_name='Reference.app', index=0, - number=13, type=9, cpp_type=9, label=2, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='name_space', full_name='Reference.name_space', index=1, - number=20, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='path', full_name='Reference.path', index=2, - number=14, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='database_id', full_name='Reference.database_id', index=3, - number=23, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=25, - serialized_end=111, + name="Reference", + full_name="Reference", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="app", + full_name="Reference.app", + index=0, + number=13, + type=9, + cpp_type=9, + label=2, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="name_space", + full_name="Reference.name_space", + index=1, + number=20, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="path", + full_name="Reference.path", + index=2, + number=14, + type=11, + cpp_type=10, + label=2, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="database_id", + full_name="Reference.database_id", + index=3, + number=23, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=25, + serialized_end=111, ) _PATH_ELEMENT = _descriptor.Descriptor( - name='Element', - full_name='Path.Element', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='type', full_name='Path.Element.type', index=0, - number=2, type=9, cpp_type=9, label=2, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='id', full_name='Path.Element.id', index=1, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='name', full_name='Path.Element.name', index=2, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=153, - serialized_end=202, + name="Element", + full_name="Path.Element", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="type", + full_name="Path.Element.type", + index=0, + number=2, + type=9, + cpp_type=9, + label=2, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="id", + full_name="Path.Element.id", + index=1, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="Path.Element.name", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=153, + serialized_end=202, ) _PATH = _descriptor.Descriptor( - name='Path', - full_name='Path', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='element', full_name='Path.element', index=0, - number=1, type=10, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_PATH_ELEMENT, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=113, - serialized_end=202, + 
name="Path", + full_name="Path", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="element", + full_name="Path.element", + index=0, + number=1, + type=10, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ) + ], + extensions=[], + nested_types=[_PATH_ELEMENT], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=113, + serialized_end=202, ) -_REFERENCE.fields_by_name['path'].message_type = _PATH +_REFERENCE.fields_by_name["path"].message_type = _PATH _PATH_ELEMENT.containing_type = _PATH -_PATH.fields_by_name['element'].message_type = _PATH_ELEMENT -DESCRIPTOR.message_types_by_name['Reference'] = _REFERENCE -DESCRIPTOR.message_types_by_name['Path'] = _PATH - -Reference = _reflection.GeneratedProtocolMessageType('Reference', (_message.Message,), dict( - DESCRIPTOR = _REFERENCE, - __module__ = '_app_engine_key_pb2' - # @@protoc_insertion_point(class_scope:Reference) - )) +_PATH.fields_by_name["element"].message_type = _PATH_ELEMENT +DESCRIPTOR.message_types_by_name["Reference"] = _REFERENCE +DESCRIPTOR.message_types_by_name["Path"] = _PATH + +Reference = _reflection.GeneratedProtocolMessageType( + "Reference", + (_message.Message,), + dict( + DESCRIPTOR=_REFERENCE, + __module__="_app_engine_key_pb2" + # @@protoc_insertion_point(class_scope:Reference) + ), +) _sym_db.RegisterMessage(Reference) -Path = _reflection.GeneratedProtocolMessageType('Path', (_message.Message,), dict( - - Element = _reflection.GeneratedProtocolMessageType('Element', (_message.Message,), dict( - DESCRIPTOR = _PATH_ELEMENT, - __module__ = '_app_engine_key_pb2' - # @@protoc_insertion_point(class_scope:Path.Element) - )) - , - DESCRIPTOR = _PATH, - __module__ = '_app_engine_key_pb2' - # @@protoc_insertion_point(class_scope:Path) - )) +Path = _reflection.GeneratedProtocolMessageType( + "Path", + (_message.Message,), + dict( + Element=_reflection.GeneratedProtocolMessageType( + "Element", + (_message.Message,), + dict( + DESCRIPTOR=_PATH_ELEMENT, + __module__="_app_engine_key_pb2" + # @@protoc_insertion_point(class_scope:Path.Element) + ), + ), + DESCRIPTOR=_PATH, + __module__="_app_engine_key_pb2" + # @@protoc_insertion_point(class_scope:Path) + ), +) _sym_db.RegisterMessage(Path) _sym_db.RegisterMessage(Path.Element) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py index b56ba5e89c64..1210f2821802 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py @@ -34,19 +34,16 @@ def make_datastore_api(client): :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` :returns: A datastore API instance with the proper credentials. 
""" - parse_result = six.moves.urllib_parse.urlparse( - client._base_url) + parse_result = six.moves.urllib_parse.urlparse(client._base_url) host = parse_result.netloc - if parse_result.scheme == 'https': - channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, host) + if parse_result.scheme == "https": + channel = make_secure_channel(client._credentials, DEFAULT_USER_AGENT, host) else: channel = insecure_channel(host) return datastore_client.DatastoreClient( channel=channel, client_info=client_info.ClientInfo( - client_library_version=__version__, - gapic_version=__version__, + client_library_version=__version__, gapic_version=__version__ ), ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index ef02bbbff4ce..03a551cec64e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -23,14 +23,13 @@ from google.cloud.datastore import __version__ -DATASTORE_API_HOST = 'datastore.googleapis.com' +DATASTORE_API_HOST = "datastore.googleapis.com" """Datastore API request host.""" -API_BASE_URL = 'https://' + DATASTORE_API_HOST +API_BASE_URL = "https://" + DATASTORE_API_HOST """The base of the API call URL.""" -API_VERSION = 'v1' +API_VERSION = "v1" """The version of the API, used in building the API call's URL.""" -API_URL_TEMPLATE = ('{api_base}/{api_version}/projects' - '/{project}:{method}') +API_URL_TEMPLATE = "{api_base}/{api_version}/projects" "/{project}:{method}" """A template for the URL of a particular API call.""" _CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(__version__) @@ -62,19 +61,19 @@ def _request(http, project, method, data, base_url): response code is not 200 OK. """ headers = { - 'Content-Type': 'application/x-protobuf', - 'User-Agent': connection_module.DEFAULT_USER_AGENT, + "Content-Type": "application/x-protobuf", + "User-Agent": connection_module.DEFAULT_USER_AGENT, connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, } api_url = build_api_url(project, method, base_url) - response = http.request( - url=api_url, method='POST', headers=headers, data=data) + response = http.request(url=api_url, method="POST", headers=headers, data=data) if response.status_code != 200: error_status = status_pb2.Status.FromString(response.content) raise exceptions.from_http_status( - response.status_code, error_status.message, errors=[error_status]) + response.status_code, error_status.message, errors=[error_status] + ) return response.content @@ -107,8 +106,7 @@ def _rpc(http, project, method, base_url, request_pb, response_pb_cls): :returns: The RPC message parsed from the response. """ req_data = request_pb.SerializeToString() - response = _request( - http, project, method, req_data, base_url) + response = _request(http, project, method, req_data, base_url) return response_pb_cls.FromString(response) @@ -132,8 +130,8 @@ def build_api_url(project, method, base_url): :returns: The API URL created. """ return API_URL_TEMPLATE.format( - api_base=base_url, api_version=API_VERSION, - project=project, method=method) + api_base=base_url, api_version=API_VERSION, project=project, method=method + ) class HTTPDatastoreAPI(object): @@ -167,16 +165,20 @@ def lookup(self, project_id, keys, read_options=None): :returns: The returned protobuf response object. 
""" request_pb = _datastore_pb2.LookupRequest( - project_id=project_id, - read_options=read_options, - keys=keys, + project_id=project_id, read_options=read_options, keys=keys + ) + return _rpc( + self.client._http, + project_id, + "lookup", + self.client._base_url, + request_pb, + _datastore_pb2.LookupResponse, ) - return _rpc(self.client._http, project_id, 'lookup', - self.client._base_url, - request_pb, _datastore_pb2.LookupResponse) - def run_query(self, project_id, partition_id, read_options=None, - query=None, gql_query=None): + def run_query( + self, project_id, partition_id, read_options=None, query=None, gql_query=None + ): """Perform a ``runQuery`` request. :type project_id: str @@ -210,9 +212,14 @@ def run_query(self, project_id, partition_id, read_options=None, query=query, gql_query=gql_query, ) - return _rpc(self.client._http, project_id, 'runQuery', - self.client._base_url, - request_pb, _datastore_pb2.RunQueryResponse) + return _rpc( + self.client._http, + project_id, + "runQuery", + self.client._base_url, + request_pb, + _datastore_pb2.RunQueryResponse, + ) def begin_transaction(self, project_id, transaction_options=None): """Perform a ``beginTransaction`` request. @@ -228,9 +235,14 @@ def begin_transaction(self, project_id, transaction_options=None): :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.BeginTransactionRequest() - return _rpc(self.client._http, project_id, 'beginTransaction', - self.client._base_url, - request_pb, _datastore_pb2.BeginTransactionResponse) + return _rpc( + self.client._http, + project_id, + "beginTransaction", + self.client._base_url, + request_pb, + _datastore_pb2.BeginTransactionResponse, + ) def commit(self, project_id, mode, mutations, transaction=None): """Perform a ``commit`` request. @@ -261,9 +273,14 @@ def commit(self, project_id, mode, mutations, transaction=None): transaction=transaction, mutations=mutations, ) - return _rpc(self.client._http, project_id, 'commit', - self.client._base_url, - request_pb, _datastore_pb2.CommitResponse) + return _rpc( + self.client._http, + project_id, + "commit", + self.client._base_url, + request_pb, + _datastore_pb2.CommitResponse, + ) def rollback(self, project_id, transaction): """Perform a ``rollback`` request. @@ -279,13 +296,17 @@ def rollback(self, project_id, transaction): :returns: The returned protobuf response object. """ request_pb = _datastore_pb2.RollbackRequest( - project_id=project_id, - transaction=transaction, + project_id=project_id, transaction=transaction ) # Response is empty (i.e. no fields) but we return it anyway. - return _rpc(self.client._http, project_id, 'rollback', - self.client._base_url, - request_pb, _datastore_pb2.RollbackResponse) + return _rpc( + self.client._http, + project_id, + "rollback", + self.client._base_url, + request_pb, + _datastore_pb2.RollbackResponse, + ) def allocate_ids(self, project_id, keys): """Perform an ``allocateIds`` request. @@ -301,6 +322,11 @@ def allocate_ids(self, project_id, keys): :returns: The returned protobuf response object. 
""" request_pb = _datastore_pb2.AllocateIdsRequest(keys=keys) - return _rpc(self.client._http, project_id, 'allocateIds', - self.client._base_url, - request_pb, _datastore_pb2.AllocateIdsResponse) + return _rpc( + self.client._http, + project_id, + "allocateIds", + self.client._base_url, + request_pb, + _datastore_pb2.AllocateIdsResponse, + ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 49be09964eb3..eaa839f4e540 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -182,7 +182,7 @@ def put(self, entity): ``project`` does not match ours. """ if self._status != self._IN_PROGRESS: - raise ValueError('Batch must be in progress to put()') + raise ValueError("Batch must be in progress to put()") if entity.key is None: raise ValueError("Entity must have a key") @@ -209,7 +209,7 @@ def delete(self, key): ``project`` does not match ours. """ if self._status != self._IN_PROGRESS: - raise ValueError('Batch must be in progress to delete()') + raise ValueError("Batch must be in progress to delete()") if key.is_partial: raise ValueError("Key must be complete") @@ -232,7 +232,7 @@ def begin(self): :raises: :class:`ValueError` if the batch has already begun. """ if self._status != self._INITIAL: - raise ValueError('Batch already started previously.') + raise ValueError("Batch already started previously.") self._status = self._IN_PROGRESS def _commit(self): @@ -246,13 +246,13 @@ def _commit(self): mode = _datastore_pb2.CommitRequest.TRANSACTIONAL commit_response_pb = self._client._datastore_api.commit( - self.project, mode, self._mutations, transaction=self._id) + self.project, mode, self._mutations, transaction=self._id + ) _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that # ``commit`` will return keys that match (length and # order) directly ``_partial_key_entities``. - for new_key_pb, entity in zip(updated_keys, - self._partial_key_entities): + for new_key_pb, entity in zip(updated_keys, self._partial_key_entities): new_id = new_key_pb.path[-1].id entity.key = entity.key.completed_key(new_id) @@ -267,7 +267,7 @@ def commit(self): in progress. """ if self._status != self._IN_PROGRESS: - raise ValueError('Batch must be in progress to commit()') + raise ValueError("Batch must be in progress to commit()") try: self._commit() @@ -285,7 +285,7 @@ def rollback(self): in progress. 
""" if self._status != self._IN_PROGRESS: - raise ValueError('Batch must be in progress to rollback()') + raise ValueError("Batch must be in progress to rollback()") self._status = self._ABORTED @@ -334,6 +334,7 @@ def _parse_commit_response(commit_response_pb): """ mut_results = commit_response_pb.mutation_results index_updates = commit_response_pb.index_updates - completed_keys = [mut_result.key for mut_result in mut_results - if mut_result.HasField('key')] # Message field (Key) + completed_keys = [ + mut_result.key for mut_result in mut_results if mut_result.HasField("key") + ] # Message field (Key) return index_updates, completed_keys diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 8daa68c937c9..6f4d82eae672 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -16,8 +16,7 @@ import os from google.cloud._helpers import _LocalStack -from google.cloud._helpers import (_determine_default_project as - _base_default_project) +from google.cloud._helpers import _determine_default_project as _base_default_project from google.cloud.client import ClientWithProject from google.cloud.datastore import helpers from google.cloud.datastore._http import HTTPDatastoreAPI @@ -32,6 +31,7 @@ try: from google.cloud.datastore._gapic import make_datastore_api + _HAVE_GRPC = True except ImportError: # pragma: NO COVER make_datastore_api = None @@ -40,7 +40,7 @@ _MAX_LOOPS = 128 """Maximum number of iterations to wait for deferred keys.""" -_DATASTORE_BASE_URL = 'https://datastore.googleapis.com' +_DATASTORE_BASE_URL = "https://datastore.googleapis.com" """Datastore API request URL base.""" _USE_GRPC = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) @@ -77,9 +77,15 @@ def _determine_default_project(project=None): return project -def _extended_lookup(datastore_api, project, key_pbs, - missing=None, deferred=None, - eventual=False, transaction_id=None): +def _extended_lookup( + datastore_api, + project, + key_pbs, + missing=None, + deferred=None, + eventual=False, + transaction_id=None, +): """Repeat lookup until all keys found (unless stop requested). Helper function for :meth:`Client.get_multi`. @@ -121,10 +127,10 @@ def _extended_lookup(datastore_api, project, key_pbs, empty list. """ if missing is not None and missing != []: - raise ValueError('missing must be None or an empty list') + raise ValueError("missing must be None or an empty list") if deferred is not None and deferred != []: - raise ValueError('deferred must be None or an empty list') + raise ValueError("deferred must be None or an empty list") results = [] @@ -133,9 +139,7 @@ def _extended_lookup(datastore_api, project, key_pbs, while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( - project, - key_pbs, - read_options=read_options, + project, key_pbs, read_options=read_options ) # Accumulate the new results. @@ -196,13 +200,15 @@ class Client(ClientWithProject): change in the future. 
""" - SCOPE = ('https://www.googleapis.com/auth/datastore',) + SCOPE = ("https://www.googleapis.com/auth/datastore",) """The scopes required for authenticating as a Cloud Datastore consumer.""" - def __init__(self, project=None, namespace=None, - credentials=None, _http=None, _use_grpc=None): + def __init__( + self, project=None, namespace=None, credentials=None, _http=None, _use_grpc=None + ): super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) + project=project, credentials=credentials, _http=_http + ) self.namespace = namespace self._batch_stack = _LocalStack() self._datastore_api_internal = None @@ -212,7 +218,7 @@ def __init__(self, project=None, namespace=None, self._use_grpc = _use_grpc try: host = os.environ[GCD_HOST] - self._base_url = 'http://' + host + self._base_url = "http://" + host except KeyError: self._base_url = _DATASTORE_BASE_URL @@ -287,8 +293,7 @@ def current_transaction(self): if isinstance(transaction, Transaction): return transaction - def get(self, key, missing=None, deferred=None, - transaction=None, eventual=False): + def get(self, key, missing=None, deferred=None, transaction=None, eventual=False): """Retrieve an entity from a single key (if it exists). .. note:: @@ -324,16 +329,19 @@ def get(self, key, missing=None, deferred=None, :raises: :class:`ValueError` if eventual is True and in a transaction. """ - entities = self.get_multi(keys=[key], - missing=missing, - deferred=deferred, - transaction=transaction, - eventual=eventual) + entities = self.get_multi( + keys=[key], + missing=missing, + deferred=deferred, + transaction=transaction, + eventual=eventual, + ) if entities: return entities[0] - def get_multi(self, keys, missing=None, deferred=None, - transaction=None, eventual=False): + def get_multi( + self, keys, missing=None, deferred=None, transaction=None, eventual=False + ): """Retrieve entities, along with their attributes. :type keys: list of :class:`google.cloud.datastore.key.Key` @@ -371,7 +379,7 @@ def get_multi(self, keys, missing=None, deferred=None, ids = set(key.project for key in keys) for current_id in ids: if current_id != self.project: - raise ValueError('Keys do not match project') + raise ValueError("Keys do not match project") if transaction is None: transaction = self.current_transaction @@ -388,16 +396,15 @@ def get_multi(self, keys, missing=None, deferred=None, if missing is not None: missing[:] = [ - helpers.entity_from_protobuf(missed_pb) - for missed_pb in missing] + helpers.entity_from_protobuf(missed_pb) for missed_pb in missing + ] if deferred is not None: deferred[:] = [ - helpers.key_from_protobuf(deferred_pb) - for deferred_pb in deferred] + helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred + ] - return [helpers.entity_from_protobuf(entity_pb) - for entity_pb in entity_pbs] + return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs] def put(self, entity): """Save an entity in the Cloud Datastore. @@ -493,28 +500,31 @@ def allocate_ids(self, incomplete_key, num_ids): partial key. 
""" if not incomplete_key.is_partial: - raise ValueError(('Key is not partial.', incomplete_key)) + raise ValueError(("Key is not partial.", incomplete_key)) incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids response_pb = self._datastore_api.allocate_ids( - incomplete_key.project, incomplete_key_pbs) - allocated_ids = [allocated_key_pb.path[-1].id - for allocated_key_pb in response_pb.keys] - return [incomplete_key.completed_key(allocated_id) - for allocated_id in allocated_ids] + incomplete_key.project, incomplete_key_pbs + ) + allocated_ids = [ + allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys + ] + return [ + incomplete_key.completed_key(allocated_id) for allocated_id in allocated_ids + ] def key(self, *path_args, **kwargs): """Proxy to :class:`google.cloud.datastore.key.Key`. Passes our ``project``. """ - if 'project' in kwargs: - raise TypeError('Cannot pass project') - kwargs['project'] = self.project - if 'namespace' not in kwargs: - kwargs['namespace'] = self.namespace + if "project" in kwargs: + raise TypeError("Cannot pass project") + kwargs["project"] = self.project + if "namespace" not in kwargs: + kwargs["namespace"] = self.namespace return Key(*path_args, **kwargs) def batch(self): @@ -604,11 +614,11 @@ def do_something(entity): :rtype: :class:`~google.cloud.datastore.query.Query` :returns: A query object. """ - if 'client' in kwargs: - raise TypeError('Cannot pass client') - if 'project' in kwargs: - raise TypeError('Cannot pass project') - kwargs['project'] = self.project - if 'namespace' not in kwargs: - kwargs['namespace'] = self.namespace + if "client" in kwargs: + raise TypeError("Cannot pass client") + if "project" in kwargs: + raise TypeError("Cannot pass project") + kwargs["project"] = self.project + if "namespace" not in kwargs: + kwargs["namespace"] = self.namespace return Query(self, **kwargs) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 6f76e869645d..c317bdda85fd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -135,8 +135,9 @@ class Entity(dict): def __init__(self, key=None, exclude_from_indexes=()): super(Entity, self).__init__() self.key = key - self.exclude_from_indexes = set(_ensure_tuple_or_list( - 'exclude_from_indexes', exclude_from_indexes)) + self.exclude_from_indexes = set( + _ensure_tuple_or_list("exclude_from_indexes", exclude_from_indexes) + ) """Names of fields which are *not* to be indexed for this entity.""" # NOTE: This will be populated when parsing a protobuf in # google.cloud.datastore.helpers.entity_from_protobuf. @@ -154,10 +155,12 @@ def __eq__(self, other): if not isinstance(other, Entity): return NotImplemented - return (self.key == other.key and - self.exclude_from_indexes == other.exclude_from_indexes and - self._meanings == other._meanings and - super(Entity, self).__eq__(other)) + return ( + self.key == other.key + and self.exclude_from_indexes == other.exclude_from_indexes + and self._meanings == other._meanings + and super(Entity, self).__eq__(other) + ) def __ne__(self, other): """Compare two entities for inequality. 
@@ -202,7 +205,9 @@ def id(self): def __repr__(self): if self.key: - return '' % (self.key._flat_path, - super(Entity, self).__repr__()) + return "" % ( + self.key._flat_path, + super(Entity, self).__repr__(), + ) else: - return '' % (super(Entity, self).__repr__(),) + return "" % (super(Entity, self).__repr__(),) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 38c4b0ddfdf6..db6f150eff8b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -58,8 +58,9 @@ def _get_meaning(value_pb, is_list=False): # We check among all the meanings, some of which may be None, # the rest which may be enum/int values. - all_meanings = [_get_meaning(sub_value_pb) - for sub_value_pb in value_pb.array_value.values] + all_meanings = [ + _get_meaning(sub_value_pb) for sub_value_pb in value_pb.array_value.values + ] unique_meanings = set(all_meanings) if len(unique_meanings) == 1: # If there is a unique meaning, we preserve it. @@ -114,7 +115,7 @@ def entity_from_protobuf(pb): :returns: The entity derived from the protobuf. """ key = None - if pb.HasField('key'): # Message field (Key) + if pb.HasField("key"): # Message field (Key) key = key_from_protobuf(pb.key) entity_props = {} @@ -135,12 +136,16 @@ def entity_from_protobuf(pb): # special-cased and we require all ``exclude_from_indexes`` values # in a list agree. if is_list and len(value) > 0: - exclude_values = set(value_pb.exclude_from_indexes - for value_pb in value_pb.array_value.values) + exclude_values = set( + value_pb.exclude_from_indexes + for value_pb in value_pb.array_value.values + ) if len(exclude_values) != 1: - raise ValueError('For an array_value, subvalues must either ' - 'all be indexed or all excluded from ' - 'indexes.') + raise ValueError( + "For an array_value, subvalues must either " + "all be indexed or all excluded from " + "indexes." + ) if exclude_values.pop(): exclude_from_indexes.append(prop_name) @@ -154,8 +159,7 @@ def entity_from_protobuf(pb): return entity -def _set_pb_meaning_from_entity(entity, name, value, value_pb, - is_list=False): +def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): """Add meaning information (from an entity) to a protobuf. :type entity: :class:`google.cloud.datastore.entity.Entity` @@ -187,8 +191,7 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, if is_list: if not isinstance(meaning, list): meaning = itertools.repeat(meaning) - val_iter = six.moves.zip(value_pb.array_value.values, - meaning) + val_iter = six.moves.zip(value_pb.array_value.values, meaning) for sub_value_pb, sub_meaning in val_iter: if sub_meaning is not None: sub_value_pb.meaning = sub_meaning @@ -226,8 +229,9 @@ def entity_to_protobuf(entity): sub_value.exclude_from_indexes = True # Add meaning information to protobuf. 
- _set_pb_meaning_from_entity(entity, name, value, value_pb, - is_list=value_is_list) + _set_pb_meaning_from_entity( + entity, name, value, value_pb, is_list=value_is_list + ) return entity_pb @@ -252,15 +256,15 @@ def get_read_options(eventual, transaction_id): if transaction_id is None: if eventual: return datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.EVENTUAL) + read_consistency=datastore_pb2.ReadOptions.EVENTUAL + ) else: return datastore_pb2.ReadOptions() else: if eventual: - raise ValueError('eventual must be False when in a transaction') + raise ValueError("eventual must be False when in a transaction") else: - return datastore_pb2.ReadOptions( - transaction=transaction_id) + return datastore_pb2.ReadOptions(transaction=transaction_id) def key_from_protobuf(pb): @@ -332,36 +336,36 @@ def _pb_attr_value(val): """ if isinstance(val, datetime.datetime): - name = 'timestamp' + name = "timestamp" value = _datetime_to_pb_timestamp(val) elif isinstance(val, Key): - name, value = 'key', val.to_protobuf() + name, value = "key", val.to_protobuf() elif isinstance(val, bool): - name, value = 'boolean', val + name, value = "boolean", val elif isinstance(val, float): - name, value = 'double', val + name, value = "double", val elif isinstance(val, six.integer_types): - name, value = 'integer', val + name, value = "integer", val elif isinstance(val, six.text_type): - name, value = 'string', val + name, value = "string", val elif isinstance(val, six.binary_type): - name, value = 'blob', val + name, value = "blob", val elif isinstance(val, Entity): - name, value = 'entity', val + name, value = "entity", val elif isinstance(val, dict): entity_val = Entity(key=None) entity_val.update(val) - name, value = 'entity', entity_val + name, value = "entity", entity_val elif isinstance(val, list): - name, value = 'array', val + name, value = "array", val elif isinstance(val, GeoPoint): - name, value = 'geo_point', val.to_protobuf() + name, value = "geo_point", val.to_protobuf() elif val is None: - name, value = 'null', struct_pb2.NULL_VALUE + name, value = "null", struct_pb2.NULL_VALUE else: - raise ValueError('Unknown protobuf attr type', type(val)) + raise ValueError("Unknown protobuf attr type", type(val)) - return name + '_value', value + return name + "_value", value def _get_value_from_value_pb(value_pb): @@ -382,45 +386,47 @@ def _get_value_from_value_pb(value_pb): :raises: :class:`ValueError ` if no value type has been set. 
""" - value_type = value_pb.WhichOneof('value_type') + value_type = value_pb.WhichOneof("value_type") - if value_type == 'timestamp_value': + if value_type == "timestamp_value": result = _pb_timestamp_to_datetime(value_pb.timestamp_value) - elif value_type == 'key_value': + elif value_type == "key_value": result = key_from_protobuf(value_pb.key_value) - elif value_type == 'boolean_value': + elif value_type == "boolean_value": result = value_pb.boolean_value - elif value_type == 'double_value': + elif value_type == "double_value": result = value_pb.double_value - elif value_type == 'integer_value': + elif value_type == "integer_value": result = value_pb.integer_value - elif value_type == 'string_value': + elif value_type == "string_value": result = value_pb.string_value - elif value_type == 'blob_value': + elif value_type == "blob_value": result = value_pb.blob_value - elif value_type == 'entity_value': + elif value_type == "entity_value": result = entity_from_protobuf(value_pb.entity_value) - elif value_type == 'array_value': - result = [_get_value_from_value_pb(value) - for value in value_pb.array_value.values] + elif value_type == "array_value": + result = [ + _get_value_from_value_pb(value) for value in value_pb.array_value.values + ] - elif value_type == 'geo_point_value': - result = GeoPoint(value_pb.geo_point_value.latitude, - value_pb.geo_point_value.longitude) + elif value_type == "geo_point_value": + result = GeoPoint( + value_pb.geo_point_value.latitude, value_pb.geo_point_value.longitude + ) - elif value_type == 'null_value': + elif value_type == "null_value": result = None else: - raise ValueError('Value protobuf did not have any value set') + raise ValueError("Value protobuf did not have any value set") return result @@ -443,14 +449,14 @@ def _set_protobuf_value(value_pb, val): :param val: The value to be assigned. """ attr, val = _pb_attr_value(val) - if attr == 'key_value': + if attr == "key_value": value_pb.key_value.CopyFrom(val) - elif attr == 'timestamp_value': + elif attr == "timestamp_value": value_pb.timestamp_value.CopyFrom(val) - elif attr == 'entity_value': + elif attr == "entity_value": entity_pb = entity_to_protobuf(val) value_pb.entity_value.CopyFrom(entity_pb) - elif attr == 'array_value': + elif attr == "array_value": if len(val) == 0: array_value = entity_pb2.ArrayValue(values=[]) value_pb.array_value.CopyFrom(array_value) @@ -459,7 +465,7 @@ def _set_protobuf_value(value_pb, val): for item in val: i_pb = l_pb.add() _set_protobuf_value(i_pb, item) - elif attr == 'geo_point_value': + elif attr == "geo_point_value": value_pb.geo_point_value.CopyFrom(val) else: # scalar, just assign setattr(value_pb, attr, val) @@ -485,8 +491,7 @@ def to_protobuf(self): :rtype: :class:`google.type.latlng_pb2.LatLng`. :returns: The current point as a protobuf. """ - return latlng_pb2.LatLng(latitude=self.latitude, - longitude=self.longitude) + return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude) def __eq__(self, other): """Compare two geo points for equality. @@ -497,8 +502,7 @@ def __eq__(self, other): if not isinstance(other, GeoPoint): return NotImplemented - return (self.latitude == other.latitude and - self.longitude == other.longitude) + return self.latitude == other.latitude and self.longitude == other.longitude def __ne__(self, other): """Compare two geo points for inequality. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 2824e57d3ebf..f537e9d0fd69 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -25,16 +25,19 @@ _DATABASE_ID_TEMPLATE = ( - 'Received non-empty database ID: {!r}.\n' - 'urlsafe strings are not expected to encode a Reference that ' - 'contains a database ID.') + "Received non-empty database ID: {!r}.\n" + "urlsafe strings are not expected to encode a Reference that " + "contains a database ID." +) _BAD_ELEMENT_TEMPLATE = ( - 'At most one of ID and name can be set on an element. Received ' - 'id = {!r} and name = {!r}.') + "At most one of ID and name can be set on an element. Received " + "id = {!r} and name = {!r}." +) _EMPTY_ELEMENT = ( - 'Exactly one of ID and name must be set on an element. ' - 'Encountered an element with neither set that was not the last ' - 'element of a path.') + "Exactly one of ID and name must be set on an element. " + "Encountered an element with neither set that was not the last " + "element of a path." +) class Key(object): @@ -103,9 +106,9 @@ class Key(object): def __init__(self, *path_args, **kwargs): self._flat_path = path_args - parent = self._parent = kwargs.get('parent') - self._namespace = kwargs.get('namespace') - project = kwargs.get('project') + parent = self._parent = kwargs.get("parent") + self._namespace = kwargs.get("namespace") + project = kwargs.get("project") self._project = _validate_project(project, parent) # _flat_path, _parent, _namespace and _project must be set before # _combine_args() is called. @@ -128,9 +131,11 @@ def __eq__(self, other): if self.is_partial or other.is_partial: return False - return (self.flat_path == other.flat_path and - self.project == other.project and - self.namespace == other.namespace) + return ( + self.flat_path == other.flat_path + and self.project == other.project + and self.namespace == other.namespace + ) def __ne__(self, other): """Compare two keys for inequality. @@ -151,9 +156,7 @@ def __hash__(self): :rtype: int :returns: a hash of the key's state. """ - return (hash(self.flat_path) + - hash(self.project) + - hash(self.namespace)) + return hash(self.flat_path) + hash(self.project) + hash(self.namespace) @staticmethod def _parse_path(path_args): @@ -171,7 +174,7 @@ def _parse_path(path_args): a string or an integer. 
""" if len(path_args) == 0: - raise ValueError('Key path must not be empty.') + raise ValueError("Key path must not be empty.") kind_list = path_args[::2] id_or_name_list = path_args[1::2] @@ -184,17 +187,16 @@ def _parse_path(path_args): for kind, id_or_name in zip(kind_list, id_or_name_list): curr_key_part = {} if isinstance(kind, six.string_types): - curr_key_part['kind'] = kind + curr_key_part["kind"] = kind else: - raise ValueError(kind, 'Kind was not a string.') + raise ValueError(kind, "Kind was not a string.") if isinstance(id_or_name, six.string_types): - curr_key_part['name'] = id_or_name + curr_key_part["name"] = id_or_name elif isinstance(id_or_name, six.integer_types): - curr_key_part['id'] = id_or_name + curr_key_part["id"] = id_or_name elif id_or_name is not partial_ending: - raise ValueError(id_or_name, - 'ID/name was not a string or integer.') + raise ValueError(id_or_name, "ID/name was not a string or integer.") result.append(curr_key_part) @@ -214,18 +216,19 @@ def _combine_args(self): if self._parent is not None: if self._parent.is_partial: - raise ValueError('Parent key must be complete.') + raise ValueError("Parent key must be complete.") # We know that _parent.path() will return a copy. child_path = self._parent.path + child_path self._flat_path = self._parent.flat_path + self._flat_path - if (self._namespace is not None and - self._namespace != self._parent.namespace): - raise ValueError('Child namespace must agree with parent\'s.') + if ( + self._namespace is not None + and self._namespace != self._parent.namespace + ): + raise ValueError("Child namespace must agree with parent's.") self._namespace = self._parent.namespace - if (self._project is not None and - self._project != self._parent.project): - raise ValueError('Child project must agree with parent\'s.') + if self._project is not None and self._project != self._parent.project: + raise ValueError("Child project must agree with parent's.") self._project = self._parent.project return child_path @@ -239,9 +242,9 @@ def _clone(self): :rtype: :class:`google.cloud.datastore.key.Key` :returns: A new ``Key`` instance with the same data as the current one. """ - cloned_self = self.__class__(*self.flat_path, - project=self.project, - namespace=self.namespace) + cloned_self = self.__class__( + *self.flat_path, project=self.project, namespace=self.namespace + ) # If the current parent has already been set, we re-use # the same instance cloned_self._parent = self._parent @@ -260,15 +263,14 @@ def completed_key(self, id_or_name): ``id_or_name`` is not a string or integer. 
""" if not self.is_partial: - raise ValueError('Only a partial key can be completed.') + raise ValueError("Only a partial key can be completed.") if isinstance(id_or_name, six.string_types): - id_or_name_key = 'name' + id_or_name_key = "name" elif isinstance(id_or_name, six.integer_types): - id_or_name_key = 'id' + id_or_name_key = "id" else: - raise ValueError(id_or_name, - 'ID/name was not a string or integer.') + raise ValueError(id_or_name, "ID/name was not a string or integer.") new_key = self._clone() new_key._path[-1][id_or_name_key] = id_or_name @@ -289,12 +291,12 @@ def to_protobuf(self): for item in self.path: element = key.path.add() - if 'kind' in item: - element.kind = item['kind'] - if 'id' in item: - element.id = item['id'] - if 'name' in item: - element.name = item['name'] + if "kind" in item: + element.kind = item["kind"] + if "id" in item: + element.id = item["id"] + if "name" in item: + element.name = item["name"] return key @@ -334,7 +336,7 @@ def to_legacy_urlsafe(self, location_prefix=None): name_space=self.namespace, ) raw_bytes = reference.SerializeToString() - return base64.urlsafe_b64encode(raw_bytes).strip(b'=') + return base64.urlsafe_b64encode(raw_bytes).strip(b"=") @classmethod def from_legacy_urlsafe(cls, urlsafe): @@ -352,8 +354,8 @@ def from_legacy_urlsafe(cls, urlsafe): :rtype: :class:`~google.cloud.datastore.key.Key`. :returns: The key corresponding to ``urlsafe``. """ - urlsafe = _to_bytes(urlsafe, encoding='ascii') - padding = b'=' * (-len(urlsafe) % 4) + urlsafe = _to_bytes(urlsafe, encoding="ascii") + padding = b"=" * (-len(urlsafe) % 4) urlsafe += padding raw_bytes = base64.urlsafe_b64decode(urlsafe) @@ -361,7 +363,7 @@ def from_legacy_urlsafe(cls, urlsafe): reference.ParseFromString(raw_bytes) project = _clean_app(reference.app) - namespace = _get_empty(reference.name_space, u'') + namespace = _get_empty(reference.name_space, u"") _check_database_id(reference.database_id) flat_path = _get_flat_path(reference.path) return cls(*flat_path, project=project, namespace=namespace) @@ -412,7 +414,7 @@ def kind(self): :rtype: str :returns: The kind of the current key. """ - return self.path[-1]['kind'] + return self.path[-1]["kind"] @property def id(self): @@ -421,7 +423,7 @@ def id(self): :rtype: int :returns: The (integer) ID of the key. """ - return self.path[-1].get('id') + return self.path[-1].get("id") @property def name(self): @@ -430,7 +432,7 @@ def name(self): :rtype: str :returns: The (string) name of the key. """ - return self.path[-1].get('name') + return self.path[-1].get("name") @property def id_or_name(self): @@ -467,8 +469,9 @@ def _make_parent(self): else: parent_args = self.flat_path[:-2] if parent_args: - return self.__class__(*parent_args, project=self.project, - namespace=self.namespace) + return self.__class__( + *parent_args, project=self.project, namespace=self.namespace + ) @property def parent(self): @@ -485,7 +488,7 @@ def parent(self): return self._parent def __repr__(self): - return '' % (self._flat_path, self.project) + return "" % (self._flat_path, self.project) def _validate_project(project, parent): @@ -523,7 +526,7 @@ def _clean_app(app_str): :rtype: str :returns: The cleaned value. """ - parts = app_str.split('~', 1) + parts = app_str.split("~", 1) return parts[-1] @@ -551,7 +554,7 @@ def _check_database_id(database_id): :raises: :exc:`ValueError` if the ``database_id`` is not empty. 
""" - if database_id != u'': + if database_id != u"": msg = _DATABASE_ID_TEMPLATE.format(database_id) raise ValueError(msg) @@ -577,13 +580,13 @@ def _add_id_or_name(flat_path, element_pb, empty_allowed): # NOTE: Below 0 and the empty string are the "null" values for their # respective types, indicating that the value is unset. if id_ == 0: - if name == u'': + if name == u"": if not empty_allowed: raise ValueError(_EMPTY_ELEMENT) else: flat_path.append(name) else: - if name == u'': + if name == u"": flat_path.append(id_) else: msg = _BAD_ELEMENT_TEMPLATE.format(id_, name) @@ -642,11 +645,11 @@ def _to_legacy_path(dict_path): """ elements = [] for part in dict_path: - element_kwargs = {'type': part['kind']} - if 'id' in part: - element_kwargs['id'] = part['id'] - elif 'name' in part: - element_kwargs['name'] = part['name'] + element_kwargs = {"type": part["kind"]} + if "id" in part: + element_kwargs["id"] = part["id"] + elif "name" in part: + element_kwargs["name"] = part["name"] element = _app_engine_key_pb2.Path.Element(**element_kwargs) elements.append(element) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5a331da79ccc..f7979d12be70 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -81,24 +81,26 @@ class Query(object): """ OPERATORS = { - '<=': query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL, - '>=': query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL, - '<': query_pb2.PropertyFilter.LESS_THAN, - '>': query_pb2.PropertyFilter.GREATER_THAN, - '=': query_pb2.PropertyFilter.EQUAL, + "<=": query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL, + ">=": query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL, + "<": query_pb2.PropertyFilter.LESS_THAN, + ">": query_pb2.PropertyFilter.GREATER_THAN, + "=": query_pb2.PropertyFilter.EQUAL, } """Mapping of operator strings and their protobuf equivalents.""" - def __init__(self, - client, - kind=None, - project=None, - namespace=None, - ancestor=None, - filters=(), - projection=(), - order=(), - distinct_on=()): + def __init__( + self, + client, + kind=None, + project=None, + namespace=None, + ancestor=None, + filters=(), + projection=(), + order=(), + distinct_on=(), + ): self._client = client self._kind = kind @@ -109,9 +111,9 @@ def __init__(self, # Verify filters passed in. for property_name, operator, value in filters: self.add_filter(property_name, operator, value) - self._projection = _ensure_tuple_or_list('projection', projection) - self._order = _ensure_tuple_or_list('order', order) - self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on) + self._projection = _ensure_tuple_or_list("projection", projection) + self._order = _ensure_tuple_or_list("order", order) + self._distinct_on = _ensure_tuple_or_list("distinct_on", distinct_on) @property def project(self): @@ -239,10 +241,10 @@ def add_filter(self, property_name, operator, value): """ if self.OPERATORS.get(operator) is None: error_message = 'Invalid expression: "%s"' % (operator,) - choices_message = 'Please use one of: =, <, <=, >, >=.' + choices_message = "Please use one of: =, <, <=, >, >=." 
raise ValueError(error_message, choices_message) - if property_name == '__key__' and not isinstance(value, Key): + if property_name == "__key__" and not isinstance(value, Key): raise ValueError('Invalid key: "%s"' % value) self._filters.append((property_name, operator, value)) @@ -270,9 +272,9 @@ def projection(self, projection): def keys_only(self): """Set the projection to include only keys.""" - self._projection[:] = ['__key__'] + self._projection[:] = ["__key__"] - def key_filter(self, key, operator='='): + def key_filter(self, key, operator="="): """Filter on a key. :type key: :class:`google.cloud.datastore.key.Key` @@ -282,7 +284,7 @@ def key_filter(self, key, operator='='): :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``. Defaults to ``=``. """ - self.add_filter('__key__', operator, key) + self.add_filter("__key__", operator, key) @property def order(self): @@ -330,8 +332,15 @@ def distinct_on(self, value): value = [value] self._distinct_on[:] = value - def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, - client=None, eventual=False): + def fetch( + self, + limit=None, + offset=0, + start_cursor=None, + end_cursor=None, + client=None, + eventual=False, + ): """Execute the Query; return an iterator for the matching entities. For example:: @@ -373,13 +382,15 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, if client is None: client = self._client - return Iterator(self, - client, - limit=limit, - offset=offset, - start_cursor=start_cursor, - end_cursor=end_cursor, - eventual=eventual) + return Iterator( + self, + client, + limit=limit, + offset=offset, + start_cursor=start_cursor, + end_cursor=end_cursor, + eventual=eventual, + ) class Iterator(page_iterator.Iterator): @@ -416,11 +427,22 @@ class Iterator(page_iterator.Iterator): next_page_token = None - def __init__(self, query, client, limit=None, offset=None, - start_cursor=None, end_cursor=None, eventual=False): + def __init__( + self, + query, + client, + limit=None, + offset=None, + start_cursor=None, + end_cursor=None, + eventual=False, + ): super(Iterator, self).__init__( - client=client, item_to_value=_item_to_entity, - page_token=start_cursor, max_results=limit) + client=client, + item_to_value=_item_to_entity, + page_token=start_cursor, + max_results=limit, + ) self._query = query self._offset = offset self._end_cursor = end_cursor @@ -477,7 +499,8 @@ def _process_query_results(self, response_pb): self.next_page_token = None else: self.next_page_token = base64.urlsafe_b64encode( - response_pb.batch.end_cursor) + response_pb.batch.end_cursor + ) self._end_cursor = None if response_pb.batch.more_results == _NOT_FINISHED: @@ -485,7 +508,7 @@ def _process_query_results(self, response_pb): elif response_pb.batch.more_results in _FINISHED: self._more_results = False else: - raise ValueError('Unexpected value returned for `more_results`.') + raise ValueError("Unexpected value returned for `more_results`.") return [result.entity for result in response_pb.batch.entity_results] @@ -508,13 +531,10 @@ def _next_page(self): read_options = helpers.get_read_options(self._eventual, transaction_id) partition_id = entity_pb2.PartitionId( - project_id=self._query.project, - namespace_id=self._query.namespace) + project_id=self._query.project, namespace_id=self._query.namespace + ) response_pb = self.client._datastore_api.run_query( - self._query.project, - partition_id, - read_options, - query=query_pb, + self._query.project, partition_id, read_options, query=query_pb ) 
entity_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, entity_pbs, self.item_to_value) @@ -547,7 +567,7 @@ def _pb_from_query(query): # Filter on __key__ HAS_ANCESTOR == ancestor. ancestor_filter = composite_filter.filters.add().property_filter - ancestor_filter.property.name = '__key__' + ancestor_filter.property.name = "__key__" ancestor_filter.op = query_pb2.PropertyFilter.HAS_ANCESTOR ancestor_filter.value.key_value.CopyFrom(ancestor_pb) @@ -560,19 +580,19 @@ def _pb_from_query(query): property_filter.op = pb_op_enum # Set the value to filter on based on the type. - if property_name == '__key__': + if property_name == "__key__": key_pb = value.to_protobuf() property_filter.value.key_value.CopyFrom(key_pb) else: helpers._set_protobuf_value(property_filter.value, value) if not composite_filter.filters: - pb.ClearField('filter') + pb.ClearField("filter") for prop in query.order: property_order = pb.order.add() - if prop.startswith('-'): + if prop.startswith("-"): property_order.property.name = prop[1:] property_order.direction = property_order.DESCENDING else: @@ -600,4 +620,6 @@ def _item_to_entity(iterator, entity_pb): :returns: The next entity in the page. """ return helpers.entity_from_protobuf(entity_pb) + + # pylint: enable=unused-argument diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 8fa71db25d54..ccff5561c644 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -164,8 +164,7 @@ def __init__(self, client, read_only=False): super(Transaction, self).__init__(client) self._id = None if read_only: - options = TransactionOptions( - read_only=TransactionOptions.ReadOnly()) + options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) else: options = TransactionOptions() self._options = options @@ -206,8 +205,7 @@ def begin(self): """ super(Transaction, self).begin() try: - response_pb = self._client._datastore_api.begin_transaction( - self.project) + response_pb = self._client._datastore_api.begin_transaction(self.project) self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead self._status = self._ABORTED @@ -258,7 +256,7 @@ def put(self, entity): :raises: :class:`RuntimeError` if the transaction is marked ReadOnly """ - if self._options.HasField('read_only'): + if self._options.HasField("read_only"): raise RuntimeError("Transaction is read only") else: super(Transaction, self).put(entity) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 5297ae3121d7..0308f10c0a6a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -24,8 +24,4 @@ class DatastoreClient(datastore_client.DatastoreClient): enums = enums -__all__ = ( - 'enums', - 'types', - 'DatastoreClient', -) +__all__ = ("enums", "types", "DatastoreClient") diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index d1ebd3c925bf..cd1ba4582a63 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ 
b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -35,7 +35,8 @@ from google.cloud.datastore_v1.proto import query_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-datastore', ).version + "google-cloud-datastore" +).version class DatastoreClient(object): @@ -48,12 +49,12 @@ class DatastoreClient(object): the request. """ - SERVICE_ADDRESS = 'datastore.googleapis.com:443' + SERVICE_ADDRESS = "datastore.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.datastore.v1.Datastore' + _INTERFACE_NAME = "google.datastore.v1.Datastore" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -69,19 +70,20 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: DatastoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -115,18 +117,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = datastore_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -135,25 +138,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=datastore_grpc_transport. - DatastoreGrpcTransport, + default_class=datastore_grpc_transport.DatastoreGrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) self.transport = transport else: self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -163,7 +165,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) 
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. # These are the actual callables which invoke the proper @@ -172,13 +175,15 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def lookup(self, - project_id, - keys, - read_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def lookup( + self, + project_id, + keys, + read_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Looks up entities by key. @@ -225,32 +230,34 @@ def lookup(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'lookup' not in self._inner_api_calls: + if "lookup" not in self._inner_api_calls: self._inner_api_calls[ - 'lookup'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.lookup, - default_retry=self._method_configs['Lookup'].retry, - default_timeout=self._method_configs['Lookup'].timeout, - client_info=self._client_info, - ) + "lookup" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.lookup, + default_retry=self._method_configs["Lookup"].retry, + default_timeout=self._method_configs["Lookup"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.LookupRequest( - project_id=project_id, - keys=keys, - read_options=read_options, + project_id=project_id, keys=keys, read_options=read_options ) - return self._inner_api_calls['lookup']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def run_query(self, - project_id, - partition_id, - read_options=None, - query=None, - gql_query=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["lookup"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def run_query( + self, + project_id, + partition_id, + read_options=None, + query=None, + gql_query=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Queries for entities. @@ -308,21 +315,19 @@ def run_query(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'run_query' not in self._inner_api_calls: + if "run_query" not in self._inner_api_calls: self._inner_api_calls[ - 'run_query'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_query, - default_retry=self._method_configs['RunQuery'].retry, - default_timeout=self._method_configs['RunQuery'].timeout, - client_info=self._client_info, - ) + "run_query" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_query, + default_retry=self._method_configs["RunQuery"].retry, + default_timeout=self._method_configs["RunQuery"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. 
- google.api_core.protobuf_helpers.check_oneof( - query=query, - gql_query=gql_query, - ) + google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query) request = datastore_pb2.RunQueryRequest( project_id=project_id, @@ -331,15 +336,18 @@ def run_query(self, query=query, gql_query=gql_query, ) - return self._inner_api_calls['run_query']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def begin_transaction(self, - project_id, - transaction_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["run_query"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def begin_transaction( + self, + project_id, + transaction_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Begins a new transaction. @@ -379,32 +387,33 @@ def begin_transaction(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'begin_transaction' not in self._inner_api_calls: + if "begin_transaction" not in self._inner_api_calls: self._inner_api_calls[ - 'begin_transaction'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.begin_transaction, - default_retry=self._method_configs['BeginTransaction']. - retry, - default_timeout=self._method_configs['BeginTransaction']. - timeout, - client_info=self._client_info, - ) + "begin_transaction" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.begin_transaction, + default_retry=self._method_configs["BeginTransaction"].retry, + default_timeout=self._method_configs["BeginTransaction"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, - transaction_options=transaction_options, + project_id=project_id, transaction_options=transaction_options ) - return self._inner_api_calls['begin_transaction']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def commit(self, - project_id, - mode, - mutations, - transaction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["begin_transaction"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def commit( + self, + project_id, + mode, + mutations, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Commits a transaction, optionally creating, deleting or modifying some entities. @@ -468,18 +477,19 @@ def commit(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'commit' not in self._inner_api_calls: + if "commit" not in self._inner_api_calls: self._inner_api_calls[ - 'commit'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.commit, - default_retry=self._method_configs['Commit'].retry, - default_timeout=self._method_configs['Commit'].timeout, - client_info=self._client_info, - ) + "commit" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.commit, + default_retry=self._method_configs["Commit"].retry, + default_timeout=self._method_configs["Commit"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. 
- google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) + google.api_core.protobuf_helpers.check_oneof(transaction=transaction) request = datastore_pb2.CommitRequest( project_id=project_id, @@ -487,15 +497,18 @@ def commit(self, mutations=mutations, transaction=transaction, ) - return self._inner_api_calls['commit']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def rollback(self, - project_id, - transaction, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["commit"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def rollback( + self, + project_id, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Rolls back a transaction. @@ -536,28 +549,31 @@ def rollback(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'rollback' not in self._inner_api_calls: + if "rollback" not in self._inner_api_calls: self._inner_api_calls[ - 'rollback'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.rollback, - default_retry=self._method_configs['Rollback'].retry, - default_timeout=self._method_configs['Rollback'].timeout, - client_info=self._client_info, - ) + "rollback" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.rollback, + default_retry=self._method_configs["Rollback"].retry, + default_timeout=self._method_configs["Rollback"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.RollbackRequest( - project_id=project_id, - transaction=transaction, + project_id=project_id, transaction=transaction ) - return self._inner_api_calls['rollback']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def allocate_ids(self, - project_id, - keys, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["rollback"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def allocate_ids( + self, + project_id, + keys, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -602,30 +618,30 @@ def allocate_ids(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'allocate_ids' not in self._inner_api_calls: + if "allocate_ids" not in self._inner_api_calls: self._inner_api_calls[ - 'allocate_ids'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.allocate_ids, - default_retry=self._method_configs['AllocateIds'].retry, - default_timeout=self._method_configs['AllocateIds']. 
- timeout, - client_info=self._client_info, - ) + "allocate_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.allocate_ids, + default_retry=self._method_configs["AllocateIds"].retry, + default_timeout=self._method_configs["AllocateIds"].timeout, + client_info=self._client_info, + ) - request = datastore_pb2.AllocateIdsRequest( - project_id=project_id, - keys=keys, + request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys) + return self._inner_api_calls["allocate_ids"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['allocate_ids']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def reserve_ids(self, - project_id, - keys, - database_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def reserve_ids( + self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -671,19 +687,19 @@ def reserve_ids(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'reserve_ids' not in self._inner_api_calls: + if "reserve_ids" not in self._inner_api_calls: self._inner_api_calls[ - 'reserve_ids'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reserve_ids, - default_retry=self._method_configs['ReserveIds'].retry, - default_timeout=self._method_configs['ReserveIds'].timeout, - client_info=self._client_info, - ) + "reserve_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.reserve_ids, + default_retry=self._method_configs["ReserveIds"].retry, + default_timeout=self._method_configs["ReserveIds"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, - keys=keys, - database_id=database_id, + project_id=project_id, keys=keys, database_id=database_id + ) + return self._inner_api_calls["reserve_ids"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['reserve_ids']( - request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index 1a3eb9523447..95822b8babfc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -3,7 +3,7 @@ "google.datastore.v1.Datastore": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -13,46 +13,46 @@ "initial_rpc_timeout_millis": 60000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, } }, "methods": { "Lookup": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "RunQuery": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "BeginTransaction": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + 
"retry_params_name": "default", }, "Commit": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Rollback": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "AllocateIds": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ReserveIds": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 41497f71781b..78de5345e377 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -28,6 +28,7 @@ class NullValue(enum.IntEnum): Attributes: NULL_VALUE (int): Null value. """ + NULL_VALUE = 0 @@ -47,6 +48,7 @@ class ResultType(enum.IntEnum): PROJECTION (int): A projected subset of properties. The entity may have no key. KEY_ONLY (int): Only the key. """ + RESULT_TYPE_UNSPECIFIED = 0 FULL = 1 PROJECTION = 2 @@ -63,6 +65,7 @@ class Direction(enum.IntEnum): ASCENDING (int): Ascending. DESCENDING (int): Descending. """ + DIRECTION_UNSPECIFIED = 0 ASCENDING = 1 DESCENDING = 2 @@ -77,6 +80,7 @@ class Operator(enum.IntEnum): OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. AND (int): The results are required to satisfy each of the combined filters. """ + OPERATOR_UNSPECIFIED = 0 AND = 1 @@ -95,6 +99,7 @@ class Operator(enum.IntEnum): EQUAL (int): Equal. HAS_ANCESTOR (int): Has ancestor. """ + OPERATOR_UNSPECIFIED = 0 LESS_THAN = 1 LESS_THAN_OR_EQUAL = 2 @@ -117,6 +122,7 @@ class MoreResultsType(enum.IntEnum): cursor. NO_MORE_RESULTS (int): The query is finished, and there are no more results. """ + MORE_RESULTS_TYPE_UNSPECIFIED = 0 NOT_FINISHED = 1 MORE_RESULTS_AFTER_LIMIT = 2 @@ -136,6 +142,7 @@ class Mode(enum.IntEnum): `here `__. NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. """ + MODE_UNSPECIFIED = 0 TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 @@ -151,6 +158,7 @@ class ReadConsistency(enum.IntEnum): STRONG (int): Strong consistency. EVENTUAL (int): Eventual consistency. """ + READ_CONSISTENCY_UNSPECIFIED = 0 STRONG = 1 EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 3f0e8ef0cd13..1b5639d2dd44 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -27,17 +27,17 @@ class DatastoreGrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. 
_OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", ) - def __init__(self, - channel=None, - credentials=None, - address='datastore.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="datastore.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -55,28 +55,21 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = { - 'datastore_stub': datastore_pb2_grpc.DatastoreStub(channel), - } + self._stubs = {"datastore_stub": datastore_pb2_grpc.DatastoreStub(channel)} @classmethod - def create_channel(cls, - address='datastore.googleapis.com:443', - credentials=None): + def create_channel(cls, address="datastore.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -91,9 +84,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -116,7 +107,7 @@ def lookup(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].Lookup + return self._stubs["datastore_stub"].Lookup @property def run_query(self): @@ -129,7 +120,7 @@ def run_query(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].RunQuery + return self._stubs["datastore_stub"].RunQuery @property def begin_transaction(self): @@ -142,7 +133,7 @@ def begin_transaction(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].BeginTransaction + return self._stubs["datastore_stub"].BeginTransaction @property def commit(self): @@ -156,7 +147,7 @@ def commit(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].Commit + return self._stubs["datastore_stub"].Commit @property def rollback(self): @@ -169,7 +160,7 @@ def rollback(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].Rollback + return self._stubs["datastore_stub"].Rollback @property def allocate_ids(self): @@ -183,7 +174,7 @@ def allocate_ids(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].AllocateIds + return self._stubs["datastore_stub"].AllocateIds @property def reserve_ids(self): @@ -197,4 +188,4 @@ def reserve_ids(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['datastore_stub'].ReserveIds + return self._stubs["datastore_stub"].ReserveIds diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py index 6f9f243b3a84..ac154bda0e67 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py @@ -37,11 +37,7 @@ latlng_pb2, ] -_local_modules = [ - datastore_pb2, - entity_pb2, - query_pb2, -] +_local_modules = [datastore_pb2, entity_pb2, query_pb2] names = [] @@ -52,7 +48,7 @@ for module in _local_modules: for name, message in get_messages(module).items(): - message.__module__ = 'google.cloud.datastore_v1.types' + message.__module__ = "google.cloud.datastore_v1.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/packages/google-cloud-datastore/tests/doctests.py b/packages/google-cloud-datastore/tests/doctests.py index 4901d8de538f..329b3d416d3b 100644 --- a/packages/google-cloud-datastore/tests/doctests.py +++ b/packages/google-cloud-datastore/tests/doctests.py @@ -39,9 +39,8 @@ """ -@unittest.skipIf(six.PY2, 'Doctests run against Python 3 only.') +@unittest.skipIf(six.PY2, "Doctests run against Python 3 only.") class TestDoctest(unittest.TestCase): - def _submodules(self): pkg_iter = pkgutil.iter_modules(datastore.__path__) result = [] @@ -49,29 +48,29 @@ def _submodules(self): self.assertFalse(ispkg) result.append(mod_name) - self.assertNotIn('__init__', result) + self.assertNotIn("__init__", result) return result @staticmethod def _add_section(index, mod_name, file_obj): - mod_part = 'datastore' - if mod_name != '__init__': - mod_part += '.' + mod_name + mod_part = "datastore" + if mod_name != "__init__": + mod_part += "." 
+ mod_name content = SPHINX_SECTION_TEMPLATE % (index, mod_part) file_obj.write(content) def _make_temp_docs(self): - docs_dir = tempfile.mkdtemp(prefix='datastore-') + docs_dir = tempfile.mkdtemp(prefix="datastore-") - conf_file = os.path.join(docs_dir, 'conf.py') + conf_file = os.path.join(docs_dir, "conf.py") - with open(conf_file, 'w') as file_obj: + with open(conf_file, "w") as file_obj: file_obj.write(SPHINX_CONF) - index_file = os.path.join(docs_dir, 'contents.rst') + index_file = os.path.join(docs_dir, "contents.rst") datastore_modules = self._submodules() - with open(index_file, 'w') as file_obj: - self._add_section(0, '__init__', file_obj) + with open(index_file, "w") as file_obj: + self._add_section(0, "__init__", file_obj) for index, datastore_module in enumerate(datastore_modules): self._add_section(index + 1, datastore_module, file_obj) @@ -81,13 +80,18 @@ def test_it(self): from sphinx import application docs_dir = self._make_temp_docs() - outdir = os.path.join(docs_dir, 'doctest', 'out') - doctreedir = os.path.join(docs_dir, 'doctest', 'doctrees') + outdir = os.path.join(docs_dir, "doctest", "out") + doctreedir = os.path.join(docs_dir, "doctest", "doctrees") app = application.Sphinx( - srcdir=docs_dir, confdir=docs_dir, - outdir=outdir, doctreedir=doctreedir, - buildername='doctest', warningiserror=True, parallel=1) + srcdir=docs_dir, + confdir=docs_dir, + outdir=outdir, + doctreedir=doctreedir, + buildername="doctest", + warningiserror=True, + parallel=1, + ) app.build() self.assertEqual(app.statuscode, 0) diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 2a1080b628e5..482b0b80ba7d 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -38,30 +38,35 @@ class Config(object): This is a mutable stand-in to allow test set-up to modify global state. """ + CLIENT = None TO_DELETE = [] def clone_client(client): - return datastore.Client(project=client.project, - namespace=client.namespace, - credentials=client._credentials, - _http=client._http) + return datastore.Client( + project=client.project, + namespace=client.namespace, + credentials=client._credentials, + _http=client._http, + ) def setUpModule(): emulator_dataset = os.getenv(GCD_DATASET) # Isolated namespace so concurrent test runs don't collide. - test_namespace = 'ns' + unique_resource_id() + test_namespace = "ns" + unique_resource_id() if emulator_dataset is None: Config.CLIENT = datastore.Client(namespace=test_namespace) else: credentials = EmulatorCreds() http = requests.Session() # Un-authorized. 
- Config.CLIENT = datastore.Client(project=emulator_dataset, - namespace=test_namespace, - credentials=credentials, - _http=http) + Config.CLIENT = datastore.Client( + project=emulator_dataset, + namespace=test_namespace, + credentials=credentials, + _http=http, + ) def tearDownModule(): @@ -71,7 +76,6 @@ def tearDownModule(): class TestDatastore(unittest.TestCase): - def setUp(self): self.case_entities_to_delete = [] @@ -82,11 +86,9 @@ def tearDown(self): class TestDatastoreAllocateIDs(TestDatastore): - def test_allocate_ids(self): num_ids = 10 - allocated_keys = Config.CLIENT.allocate_ids( - Config.CLIENT.key('Kind'), num_ids) + allocated_keys = Config.CLIENT.allocate_ids(Config.CLIENT.key("Kind"), num_ids) self.assertEqual(len(allocated_keys), num_ids) unique_ids = set() @@ -99,25 +101,24 @@ def test_allocate_ids(self): class TestDatastoreSave(TestDatastore): - @classmethod def setUpClass(cls): - cls.PARENT = Config.CLIENT.key('Blog', 'PizzaMan') + cls.PARENT = Config.CLIENT.key("Blog", "PizzaMan") def _get_post(self, id_or_name=None, post_content=None): post_content = post_content or { - 'title': u'How to make the perfect pizza in your grill', - 'tags': [u'pizza', u'grill'], - 'publishedAt': datetime.datetime(2001, 1, 1, tzinfo=UTC), - 'author': u'Silvano', - 'isDraft': False, - 'wordCount': 400, - 'rating': 5.0, + "title": u"How to make the perfect pizza in your grill", + "tags": [u"pizza", u"grill"], + "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC), + "author": u"Silvano", + "isDraft": False, + "wordCount": 400, + "rating": 5.0, } # Create an entity with the given content. # NOTE: Using a parent to ensure consistency for query # in `test_empty_kind`. - key = Config.CLIENT.key('Post', parent=self.PARENT) + key = Config.CLIENT.key("Post", parent=self.PARENT) entity = datastore.Entity(key=key) entity.update(post_content) @@ -143,7 +144,7 @@ def _generic_test_post(self, name=None, key_id=None): self.assertEqual(retrieved_entity, entity) def test_post_with_name(self): - self._generic_test_post(name='post1') + self._generic_test_post(name="post1") def test_post_with_id(self): self._generic_test_post(key_id=123456789) @@ -159,13 +160,13 @@ def test_save_multiple(self): self.case_entities_to_delete.append(entity1) second_post_content = { - 'title': u'How to make the perfect homemade pasta', - 'tags': [u'pasta', u'homemade'], - 'publishedAt': datetime.datetime(2001, 1, 1), - 'author': u'Silvano', - 'isDraft': False, - 'wordCount': 450, - 'rating': 4.5, + "title": u"How to make the perfect homemade pasta", + "tags": [u"pasta", u"homemade"], + "publishedAt": datetime.datetime(2001, 1, 1), + "author": u"Silvano", + "isDraft": False, + "wordCount": 450, + "rating": 4.5, } entity2 = self._get_post(post_content=second_post_content) xact.put(entity2) @@ -177,28 +178,28 @@ def test_save_multiple(self): self.assertEqual(len(matches), 2) def test_empty_kind(self): - query = Config.CLIENT.query(kind='Post') + query = Config.CLIENT.query(kind="Post") query.ancestor = self.PARENT posts = list(query.fetch(limit=2)) self.assertEqual(posts, []) def test_all_value_types(self): - key = Config.CLIENT.key('TestPanObject', 1234) + key = Config.CLIENT.key("TestPanObject", 1234) entity = datastore.Entity(key=key) - entity['timestamp'] = datetime.datetime(2014, 9, 9, tzinfo=UTC) - key_stored = Config.CLIENT.key('SavedKey', 'right-here') - entity['key'] = key_stored - entity['truthy'] = True - entity['float'] = 2.718281828 - entity['int'] = 3735928559 - entity['words'] = u'foo' - entity['blob'] = 
b'seekretz' + entity["timestamp"] = datetime.datetime(2014, 9, 9, tzinfo=UTC) + key_stored = Config.CLIENT.key("SavedKey", "right-here") + entity["key"] = key_stored + entity["truthy"] = True + entity["float"] = 2.718281828 + entity["int"] = 3735928559 + entity["words"] = u"foo" + entity["blob"] = b"seekretz" entity_stored = datastore.Entity(key=key_stored) - entity_stored['hi'] = 'bye' - entity['nested'] = entity_stored - entity['items'] = [1, 2, 3] - entity['geo'] = GeoPoint(1.0, 2.0) - entity['nothing_here'] = None + entity_stored["hi"] = "bye" + entity["nested"] = entity_stored + entity["items"] = [1, 2, 3] + entity["geo"] = GeoPoint(1.0, 2.0) + entity["nothing_here"] = None # Store the entity. self.case_entities_to_delete.append(entity) @@ -210,28 +211,26 @@ def test_all_value_types(self): class TestDatastoreSaveKeys(TestDatastore): - def test_save_key_self_reference(self): - parent_key = Config.CLIENT.key('Residence', 'NewYork') - key = Config.CLIENT.key('Person', 'name', parent=parent_key) + parent_key = Config.CLIENT.key("Residence", "NewYork") + key = Config.CLIENT.key("Person", "name", parent=parent_key) entity = datastore.Entity(key=key) - entity['fullName'] = u'Full name' - entity['linkedTo'] = key # Self reference. + entity["fullName"] = u"Full name" + entity["linkedTo"] = key # Self reference. Config.CLIENT.put(entity) self.case_entities_to_delete.append(entity) - query = Config.CLIENT.query(kind='Person') + query = Config.CLIENT.query(kind="Person") # Adding ancestor to ensure consistency. query.ancestor = parent_key - query.add_filter('linkedTo', '=', key) + query.add_filter("linkedTo", "=", key) stored_persons = list(query.fetch(limit=2)) self.assertEqual(stored_persons, [entity]) class TestDatastoreQuery(TestDatastore): - @classmethod def setUpClass(cls): cls.CLIENT = clone_client(Config.CLIENT) @@ -258,8 +257,7 @@ def tearDownClass(cls): def _base_query(self): # Use the client for this test instead of the global. - return self.CLIENT.query(kind='Character', - ancestor=self.ANCESTOR_KEY) + return self.CLIENT.query(kind="Character", ancestor=self.ANCESTOR_KEY) def test_limit_queries(self): limit = 5 @@ -282,7 +280,7 @@ def test_limit_queries(self): def test_query_simple_filter(self): query = self._base_query() - query.add_filter('appearances', '>=', 20) + query.add_filter("appearances", ">=", 20) expected_matches = 6 # We expect 6, but allow the query to get 1 extra. entities = list(query.fetch(limit=expected_matches + 1)) @@ -290,8 +288,8 @@ def test_query_simple_filter(self): def test_query_multiple_filters(self): query = self._base_query() - query.add_filter('appearances', '>=', 26) - query.add_filter('family', '=', 'Stark') + query.add_filter("appearances", ">=", 26) + query.add_filter("family", "=", "Stark") expected_matches = 4 # We expect 4, but allow the query to get 1 extra. entities = list(query.fetch(limit=expected_matches + 1)) @@ -318,20 +316,20 @@ def test_query_key_filter(self): def test_ordered_query(self): query = self._base_query() - query.order = 'appearances' + query.order = "appearances" expected_matches = 8 # We expect 8, but allow the query to get 1 extra. entities = list(query.fetch(limit=expected_matches + 1)) self.assertEqual(len(entities), expected_matches) # Actually check the ordered data returned. 
-        self.assertEqual(entities[0]['name'], self.CHARACTERS[0]['name'])
-        self.assertEqual(entities[7]['name'], self.CHARACTERS[3]['name'])
+        self.assertEqual(entities[0]["name"], self.CHARACTERS[0]["name"])
+        self.assertEqual(entities[7]["name"], self.CHARACTERS[3]["name"])
 
     def test_projection_query(self):
         filtered_query = self._base_query()
-        filtered_query.projection = ['name', 'family']
-        filtered_query.order = ['name', 'family']
+        filtered_query.projection = ["name", "family"]
+        filtered_query.order = ["name", "family"]
 
         # NOTE: There are 9 responses because of Catelyn. She has both
         #       Stark and Tully as her families, hence occurs twice in
@@ -346,27 +344,25 @@ def test_projection_query(self):
         sansa_entity = entities[8]
 
         arya_dict = dict(arya_entity)
-        self.assertEqual(arya_dict, {'name': 'Arya', 'family': 'Stark'})
+        self.assertEqual(arya_dict, {"name": "Arya", "family": "Stark"})
 
         catelyn_stark_entity = entities[2]
         catelyn_stark_dict = dict(catelyn_stark_entity)
-        self.assertEqual(catelyn_stark_dict,
-                         {'name': 'Catelyn', 'family': 'Stark'})
+        self.assertEqual(catelyn_stark_dict, {"name": "Catelyn", "family": "Stark"})
 
         catelyn_tully_dict = dict(catelyn_tully_entity)
-        self.assertEqual(catelyn_tully_dict,
-                         {'name': 'Catelyn', 'family': 'Tully'})
+        self.assertEqual(catelyn_tully_dict, {"name": "Catelyn", "family": "Tully"})
 
         # Check both Catelyn keys are the same.
         self.assertEqual(catelyn_stark_entity.key, catelyn_tully_entity.key)
 
         sansa_dict = dict(sansa_entity)
-        self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'})
+        self.assertEqual(sansa_dict, {"name": "Sansa", "family": "Stark"})
 
     def test_query_paginate_simple_uuid_keys(self):
         # See issue #4264
-        page_query = self.CLIENT.query(kind='uuid_key')
+        page_query = self.CLIENT.query(kind="uuid_key")
         iterator = page_query.fetch()
         seen = set()
@@ -383,7 +379,7 @@ def test_query_paginate_simple_uuid_keys(self):
 
     def test_query_paginate_simple_timestamp_keys(self):
         # See issue #4264
-        page_query = self.CLIENT.query(kind='timestamp_key')
+        page_query = self.CLIENT.query(kind="timestamp_key")
         iterator = page_query.fetch()
         seen = set()
@@ -402,7 +398,7 @@ def test_query_offset_timestamp_keys(self):
         max_all = 10000
         offset = 1
         max_offset = max_all - offset
-        query = self.CLIENT.query(kind='timestamp_key')
+        query = self.CLIENT.query(kind="timestamp_key")
 
         all_w_limit = list(query.fetch(limit=max_all))
         self.assertEqual(len(all_w_limit), max_all)
@@ -411,7 +407,7 @@ def test_query_offset_timestamp_keys(self):
 
     def test_query_paginate_with_offset(self):
         page_query = self._base_query()
-        page_query.order = 'appearances'
+        page_query.order = "appearances"
         offset = 2
         limit = 3
         iterator = page_query.fetch(limit=limit, offset=offset)
@@ -421,22 +417,21 @@ def test_query_paginate_with_offset(self):
         entities = list(page)
         cursor = iterator.next_page_token
         self.assertEqual(len(entities), limit)
-        self.assertEqual(entities[0]['name'], 'Robb')
-        self.assertEqual(entities[1]['name'], 'Bran')
-        self.assertEqual(entities[2]['name'], 'Catelyn')
+        self.assertEqual(entities[0]["name"], "Robb")
+        self.assertEqual(entities[1]["name"], "Bran")
+        self.assertEqual(entities[2]["name"], "Catelyn")
 
         # Fetch next set of characters.
-        new_iterator = page_query.fetch(limit=limit, offset=0,
-                                        start_cursor=cursor)
+        new_iterator = page_query.fetch(limit=limit, offset=0, start_cursor=cursor)
         entities = list(new_iterator)
         self.assertEqual(len(entities), limit)
-        self.assertEqual(entities[0]['name'], 'Sansa')
-        self.assertEqual(entities[1]['name'], 'Jon Snow')
-        self.assertEqual(entities[2]['name'], 'Arya')
+        self.assertEqual(entities[0]["name"], "Sansa")
+        self.assertEqual(entities[1]["name"], "Jon Snow")
+        self.assertEqual(entities[2]["name"], "Arya")
 
     def test_query_paginate_with_start_cursor(self):
         page_query = self._base_query()
-        page_query.order = 'appearances'
+        page_query.order = "appearances"
         limit = 3
         offset = 2
         iterator = page_query.fetch(limit=limit, offset=offset)
@@ -449,33 +444,31 @@ def test_query_paginate_with_start_cursor(self):
 
         # Use cursor to create a fresh query.
         fresh_query = self._base_query()
-        fresh_query.order = 'appearances'
+        fresh_query.order = "appearances"
 
-        new_entities = list(fresh_query.fetch(start_cursor=cursor,
-                                              limit=limit))
+        new_entities = list(fresh_query.fetch(start_cursor=cursor, limit=limit))
         characters_remaining = len(self.CHARACTERS) - limit - offset
         self.assertEqual(len(new_entities), characters_remaining)
-        self.assertEqual(new_entities[0]['name'], 'Sansa')
-        self.assertEqual(new_entities[2]['name'], 'Arya')
+        self.assertEqual(new_entities[0]["name"], "Sansa")
+        self.assertEqual(new_entities[2]["name"], "Arya")
 
     def test_query_distinct_on(self):
         query = self._base_query()
-        query.distinct_on = ['alive']
+        query.distinct_on = ["alive"]
         expected_matches = 2
         # We expect 2, but allow the query to get 1 extra.
         entities = list(query.fetch(limit=expected_matches + 1))
         self.assertEqual(len(entities), expected_matches)
 
-        self.assertEqual(entities[0]['name'], 'Catelyn')
-        self.assertEqual(entities[1]['name'], 'Arya')
+        self.assertEqual(entities[0]["name"], "Catelyn")
+        self.assertEqual(entities[1]["name"], "Arya")
 
 
 class TestDatastoreTransaction(TestDatastore):
-
     def test_transaction_via_with_statement(self):
-        entity = datastore.Entity(key=Config.CLIENT.key('Company', 'Google'))
-        entity['url'] = u'www.google.com'
+        entity = datastore.Entity(key=Config.CLIENT.key("Company", "Google"))
+        entity["url"] = u"www.google.com"
 
         with Config.CLIENT.transaction() as xact:
             result = Config.CLIENT.get(entity.key)
@@ -497,12 +490,12 @@ def test_transaction_via_explicit_begin_get_commit(self):
         BEFORE_1 = 100
         BEFORE_2 = 0
         TRANSFER_AMOUNT = 40
-        key1 = Config.CLIENT.key('account', '123')
+        key1 = Config.CLIENT.key("account", "123")
         account1 = datastore.Entity(key=key1)
-        account1['balance'] = BEFORE_1
-        key2 = Config.CLIENT.key('account', '234')
+        account1["balance"] = BEFORE_1
+        key2 = Config.CLIENT.key("account", "234")
         account2 = datastore.Entity(key=key2)
-        account2['balance'] = BEFORE_2
+        account2["balance"] = BEFORE_2
         Config.CLIENT.put_multi([account1, account2])
         self.case_entities_to_delete.append(account1)
         self.case_entities_to_delete.append(account2)
@@ -511,8 +504,8 @@ def test_transaction_via_explicit_begin_get_commit(self):
             xact.begin()
             from_account = Config.CLIENT.get(key1, transaction=xact)
             to_account = Config.CLIENT.get(key2, transaction=xact)
-            from_account['balance'] -= TRANSFER_AMOUNT
-            to_account['balance'] += TRANSFER_AMOUNT
+            from_account["balance"] -= TRANSFER_AMOUNT
+            to_account["balance"] += TRANSFER_AMOUNT
 
             xact.put(from_account)
             xact.put(to_account)
@@ -520,18 +513,18 @@ def test_transaction_via_explicit_begin_get_commit(self):
 
         after1 = Config.CLIENT.get(key1)
         after2 = Config.CLIENT.get(key2)
-        self.assertEqual(after1['balance'], BEFORE_1 - TRANSFER_AMOUNT)
-        self.assertEqual(after2['balance'], BEFORE_2 + TRANSFER_AMOUNT)
+        self.assertEqual(after1["balance"], BEFORE_1 - TRANSFER_AMOUNT)
+        self.assertEqual(after2["balance"], BEFORE_2 + TRANSFER_AMOUNT)
 
     def test_failure_with_contention(self):
-        contention_prop_name = 'baz'
+        contention_prop_name = "baz"
         local_client = clone_client(Config.CLIENT)
 
         # Insert an entity which will be retrieved in a transaction
         # and updated outside it with a contentious value.
-        key = local_client.key('BreakTxn', 1234)
+        key = local_client.key("BreakTxn", 1234)
         orig_entity = datastore.Entity(key=key)
-        orig_entity['foo'] = u'bar'
+        orig_entity["foo"] = u"bar"
         local_client.put(orig_entity)
         self.case_entities_to_delete.append(orig_entity)
@@ -540,22 +533,22 @@ def test_failure_with_contention(self):
                 entity_in_txn = local_client.get(key)
 
                 # Update the original entity outside the transaction.
-                orig_entity[contention_prop_name] = u'outside'
+                orig_entity[contention_prop_name] = u"outside"
                 Config.CLIENT.put(orig_entity)
 
                 # Try to update the entity which we already updated outside the
                 # transaction.
-                entity_in_txn[contention_prop_name] = u'inside'
+                entity_in_txn[contention_prop_name] = u"inside"
                 txn.put(entity_in_txn)
 
     def test_empty_array_put(self):
         local_client = clone_client(Config.CLIENT)
 
-        key = local_client.key('EmptyArray', 1234)
+        key = local_client.key("EmptyArray", 1234)
         local_client = datastore.Client()
         entity = datastore.Entity(key=key)
-        entity['children'] = []
+        entity["children"] = []
         local_client.put(entity)
         retrieved = local_client.get(entity.key)
 
-        self.assertEqual(entity['children'], retrieved['children'])
+        self.assertEqual(entity["children"], retrieved["children"])
diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py
index 5820fbf7d2b1..03f5ffddbe10 100644
--- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py
+++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py
@@ -26,20 +26,20 @@
 FETCH_MAX = 20
 
 ALL_KINDS = (
-    'Character',
-    'Company',
-    'Kind',
-    'Person',
-    'Post',
-    'uuid_key',
-    'timestamp_key',
+    "Character",
+    "Company",
+    "Kind",
+    "Person",
+    "Post",
+    "uuid_key",
+    "timestamp_key",
 )
 TRANSACTION_MAX_GROUPS = 5
 MAX_DEL_ENTITIES = 500
 
 
 def print_func(message):
-    if os.getenv('GOOGLE_CLOUD_NO_PRINT') != 'true':
+    if os.getenv("GOOGLE_CLOUD_NO_PRINT") != "true":
         print(message)
 
 
@@ -67,7 +67,7 @@ def remove_kind(kind, client):
     delete_outside_transaction = False
     with client.transaction():
         # Now that we have all results, we seek to delete.
-        print_func('Deleting keys:')
+        print_func("Deleting keys:")
         print_func(results)
 
         ancestors = get_ancestors(results)
@@ -87,19 +87,18 @@ def main():
     if len(kinds) == 0:
         kinds = ALL_KINDS
 
-    print_func('This command will remove all entities for '
-               'the following kinds:')
-    print_func('\n'.join('- ' + val for val in kinds))
-    response = six.moves.input('Is this OK [y/n]? ')
+    print_func("This command will remove all entities for " "the following kinds:")
+    print_func("\n".join("- " + val for val in kinds))
+    response = six.moves.input("Is this OK [y/n]? ")
 
-    if response.lower() == 'y':
+    if response.lower() == "y":
         for kind in kinds:
             remove_kind(kind, client)
 
     else:
-        print_func('Doing nothing.')
+        print_func("Doing nothing.")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py
index 9dcadd00dfb1..2c266a8ac4bc 100644
--- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py
+++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py
@@ -27,66 +27,38 @@
 from google.cloud import datastore
 
 
-ANCESTOR = ('Book', 'GoT')
-RICKARD = ANCESTOR + ('Character', 'Rickard')
-EDDARD = RICKARD + ('Character', 'Eddard')
+ANCESTOR = ("Book", "GoT")
+RICKARD = ANCESTOR + ("Character", "Rickard")
+EDDARD = RICKARD + ("Character", "Eddard")
 KEY_PATHS = (
     RICKARD,
     EDDARD,
-    ANCESTOR + ('Character', 'Catelyn'),
-    EDDARD + ('Character', 'Arya'),
-    EDDARD + ('Character', 'Sansa'),
-    EDDARD + ('Character', 'Robb'),
-    EDDARD + ('Character', 'Bran'),
-    EDDARD + ('Character', 'Jon Snow'),
+    ANCESTOR + ("Character", "Catelyn"),
+    EDDARD + ("Character", "Arya"),
+    EDDARD + ("Character", "Sansa"),
+    EDDARD + ("Character", "Robb"),
+    EDDARD + ("Character", "Bran"),
+    EDDARD + ("Character", "Jon Snow"),
 )
 CHARACTERS = (
+    {"name": u"Rickard", "family": u"Stark", "appearances": 0, "alive": False},
+    {"name": u"Eddard", "family": u"Stark", "appearances": 9, "alive": False},
     {
-        'name': u'Rickard',
-        'family': u'Stark',
-        'appearances': 0,
-        'alive': False,
-    }, {
-        'name': u'Eddard',
-        'family': u'Stark',
-        'appearances': 9,
-        'alive': False,
-    }, {
-        'name': u'Catelyn',
-        'family': [u'Stark', u'Tully'],
-        'appearances': 26,
-        'alive': False,
-    }, {
-        'name': u'Arya',
-        'family': u'Stark',
-        'appearances': 33,
-        'alive': True,
-    }, {
-        'name': u'Sansa',
-        'family': u'Stark',
-        'appearances': 31,
-        'alive': True,
-    }, {
-        'name': u'Robb',
-        'family': u'Stark',
-        'appearances': 22,
-        'alive': False,
-    }, {
-        'name': u'Bran',
-        'family': u'Stark',
-        'appearances': 25,
-        'alive': True,
-    }, {
-        'name': u'Jon Snow',
-        'family': u'Stark',
-        'appearances': 32,
-        'alive': True,
+        "name": u"Catelyn",
+        "family": [u"Stark", u"Tully"],
+        "appearances": 26,
+        "alive": False,
     },
+    {"name": u"Arya", "family": u"Stark", "appearances": 33, "alive": True},
+    {"name": u"Sansa", "family": u"Stark", "appearances": 31, "alive": True},
+    {"name": u"Robb", "family": u"Stark", "appearances": 22, "alive": False},
+    {"name": u"Bran", "family": u"Stark", "appearances": 25, "alive": True},
+    {"name": u"Jon Snow", "family": u"Stark", "appearances": 32, "alive": True},
 )
 
 
 def print_func(message):
-    if os.getenv('GOOGLE_CLOUD_NO_PRINT') != 'true':
+    if os.getenv("GOOGLE_CLOUD_NO_PRINT") != "true":
         print(message)
 
 
@@ -96,14 +68,14 @@ def add_characters(client=None):
         client = datastore.Client()
     with client.transaction() as xact:
         for key_path, character in six.moves.zip(KEY_PATHS, CHARACTERS):
-            if key_path[-1] != character['name']:
-                raise ValueError(('Character and key don\'t agree',
-                                  key_path, character))
+            if key_path[-1] != character["name"]:
+                raise ValueError(("Character and key don't agree", key_path, character))
             entity = datastore.Entity(key=client.key(*key_path))
             entity.update(character)
             xact.put(entity)
-            print_func('Adding Character %s %s' % (character['name'],
-                                                   character['family']))
+            print_func(
+                "Adding Character %s %s" % (character["name"], character["family"])
+            )
 
 
 def add_uid_keys(client=None):
@@ -118,10 +90,10 @@ def add_uid_keys(client=None):
         with client.batch() as batch:
             for seq_no in range(batch_size):
                 uid = str(uuid.uuid4())
-                key = client.key('uuid_key', uid)
+                key = client.key("uuid_key", uid)
                 entity = datastore.Entity(key=key)
-                entity['batch_num'] = batch_num
-                entity['seq_no'] = seq_no
+                entity["batch_num"] = batch_num
+                entity["seq_no"] = seq_no
                 batch.put(entity)
 
 
@@ -137,17 +109,15 @@ def add_timestamp_keys(client=None):
     for batch_num in range(num_batches):
         with client.batch() as batch:
             for seq_no in range(batch_size):
-                print(
-                    "time_time: batch: {}, sequence: {}".format(
-                        batch_num, seq_no))
+                print("time_time: batch: {}, sequence: {}".format(batch_num, seq_no))
                 now_micros = int(time.time() * 1e6)
                 while now_micros in timestamp_micros:
                     now_micros = int(time.time() * 1e6)
                 timestamp_micros.add(now_micros)
-                key = client.key('timestamp_key', now_micros)
+                key = client.key("timestamp_key", now_micros)
                 entity = datastore.Entity(key=key)
-                entity['batch_num'] = batch_num
-                entity['seq_no'] = seq_no
+                entity["batch_num"] = batch_num
+                entity["seq_no"] = seq_no
                 batch.put(entity)
 
 
@@ -156,17 +126,17 @@ def main():
     flags = sys.argv[1:]
 
     if len(flags) == 0:
-        flags = ['--characters', '--uuid', '--timestamps']
+        flags = ["--characters", "--uuid", "--timestamps"]
 
-    if '--characters' in flags:
+    if "--characters" in flags:
         add_characters(client)
 
-    if '--uuid' in flags:
+    if "--uuid" in flags:
         add_uid_keys(client)
 
-    if '--timestamps' in flags:
+    if "--timestamps" in flags:
         add_timestamp_keys(client)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py b/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py
index 0ac8a955480a..7dfb27ed129e 100644
--- a/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py
+++ b/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py
@@ -49,10 +49,7 @@ def __init__(self, responses=[]):
         self.responses = responses
         self.requests = []
 
-    def unary_unary(self,
-                    method,
-                    request_serializer=None,
-                    response_deserializer=None):
+    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
         return MultiCallableStub(method, self)
 
 
@@ -71,15 +68,14 @@ def test_lookup(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         keys = []
 
         response = client.lookup(project_id, keys)
         assert expected_response == response
 
         assert len(channel.requests) == 1
-        expected_request = datastore_pb2.LookupRequest(
-            project_id=project_id, keys=keys)
+        expected_request = datastore_pb2.LookupRequest(project_id=project_id, keys=keys)
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -89,7 +85,7 @@ def test_lookup_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         keys = []
 
         with pytest.raises(CustomException):
@@ -105,7 +101,7 @@ def test_run_query(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         partition_id = {}
 
         response = client.run_query(project_id, partition_id)
@@ -113,7 +109,8 @@ def test_run_query(self):
 
         assert len(channel.requests) == 1
         expected_request = datastore_pb2.RunQueryRequest(
-            project_id=project_id, partition_id=partition_id)
+            project_id=project_id, partition_id=partition_id
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -123,7 +120,7 @@ def test_run_query_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         partition_id = {}
 
         with pytest.raises(CustomException):
@@ -131,24 +128,22 @@ def test_run_query_exception(self):
 
     def test_begin_transaction(self):
         # Setup Expected Response
-        transaction = b'-34'
-        expected_response = {'transaction': transaction}
-        expected_response = datastore_pb2.BeginTransactionResponse(
-            **expected_response)
+        transaction = b"-34"
+        expected_response = {"transaction": transaction}
+        expected_response = datastore_pb2.BeginTransactionResponse(**expected_response)
 
         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
 
         response = client.begin_transaction(project_id)
         assert expected_response == response
 
         assert len(channel.requests) == 1
-        expected_request = datastore_pb2.BeginTransactionRequest(
-            project_id=project_id)
+        expected_request = datastore_pb2.BeginTransactionRequest(project_id=project_id)
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -158,7 +153,7 @@ def test_begin_transaction_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
 
         with pytest.raises(CustomException):
             client.begin_transaction(project_id)
@@ -166,7 +161,7 @@ def test_begin_transaction_exception(self):
     def test_commit(self):
         # Setup Expected Response
         index_updates = 1425228195
-        expected_response = {'index_updates': index_updates}
+        expected_response = {"index_updates": index_updates}
         expected_response = datastore_pb2.CommitResponse(**expected_response)
 
         # Mock the API response
@@ -174,7 +169,7 @@ def test_commit(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED
         mutations = []
 
@@ -183,7 +178,8 @@ def test_commit(self):
 
         assert len(channel.requests) == 1
         expected_request = datastore_pb2.CommitRequest(
-            project_id=project_id, mode=mode, mutations=mutations)
+            project_id=project_id, mode=mode, mutations=mutations
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -193,7 +189,7 @@ def test_commit_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED
         mutations = []
 
@@ -210,15 +206,16 @@ def test_rollback(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
-        transaction = b'-34'
+        project_id = "projectId-1969970175"
+        transaction = b"-34"
 
         response = client.rollback(project_id, transaction)
         assert expected_response == response
 
         assert len(channel.requests) == 1
         expected_request = datastore_pb2.RollbackRequest(
-            project_id=project_id, transaction=transaction)
+            project_id=project_id, transaction=transaction
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -228,8 +225,8 @@ def test_rollback_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
-        transaction = b'-34'
+        project_id = "projectId-1969970175"
+        transaction = b"-34"
 
         with pytest.raises(CustomException):
             client.rollback(project_id, transaction)
@@ -237,15 +234,14 @@ def test_rollback_exception(self):
     def test_allocate_ids(self):
         # Setup Expected Response
         expected_response = {}
-        expected_response = datastore_pb2.AllocateIdsResponse(
-            **expected_response)
+        expected_response = datastore_pb2.AllocateIdsResponse(**expected_response)
 
         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         keys = []
 
         response = client.allocate_ids(project_id, keys)
@@ -253,7 +249,8 @@ def test_allocate_ids(self):
 
         assert len(channel.requests) == 1
         expected_request = datastore_pb2.AllocateIdsRequest(
-            project_id=project_id, keys=keys)
+            project_id=project_id, keys=keys
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -263,7 +260,7 @@ def test_allocate_ids_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         keys = []
 
         with pytest.raises(CustomException):
@@ -272,15 +269,14 @@ def test_allocate_ids_exception(self):
     def test_reserve_ids(self):
         # Setup Expected Response
         expected_response = {}
-        expected_response = datastore_pb2.ReserveIdsResponse(
-            **expected_response)
+        expected_response = datastore_pb2.ReserveIdsResponse(**expected_response)
 
         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup Request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         keys = []
 
         response = client.reserve_ids(project_id, keys)
@@ -288,7 +284,8 @@ def test_reserve_ids(self):
 
         assert len(channel.requests) == 1
         expected_request = datastore_pb2.ReserveIdsRequest(
-            project_id=project_id, keys=keys)
+            project_id=project_id, keys=keys
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request
 
@@ -298,7 +295,7 @@ def test_reserve_ids_exception(self):
         client = datastore_v1.DatastoreClient(channel=channel)
 
         # Setup request
-        project_id = 'projectId-1969970175'
+        project_id = "projectId-1969970175"
         keys = []
 
         with pytest.raises(CustomException):
diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py
index f971eef7c4d9..ef359d4068e5 100644
--- a/packages/google-cloud-datastore/tests/unit/test__gapic.py
+++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py
@@ -19,57 +19,63 @@
 from google.cloud.datastore.client import _HAVE_GRPC
 
 
-@unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
+@unittest.skipUnless(_HAVE_GRPC, "No gRPC")
 class Test_make_datastore_api(unittest.TestCase):
-
     def _call_fut(self, client):
         from google.cloud.datastore._gapic import make_datastore_api
 
         return make_datastore_api(client)
 
     @mock.patch(
-        'google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient',
-        return_value=mock.sentinel.ds_client)
-    @mock.patch('google.cloud.datastore._gapic.make_secure_channel',
-                return_value=mock.sentinel.channel)
+        "google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient",
+        return_value=mock.sentinel.ds_client,
+    )
+    @mock.patch(
+        "google.cloud.datastore._gapic.make_secure_channel",
+        return_value=mock.sentinel.channel,
+    )
     def test_live_api(self, make_chan, mock_klass):
         from google.cloud._http import DEFAULT_USER_AGENT
 
-        base_url = 'https://datastore.googleapis.com:443'
+        base_url = "https://datastore.googleapis.com:443"
         client = mock.Mock(
             _base_url=base_url,
             _credentials=mock.sentinel.credentials,
-            spec=['_base_url', '_credentials'])
+            spec=["_base_url", "_credentials"],
+        )
         ds_api = self._call_fut(client)
         self.assertIs(ds_api, mock.sentinel.ds_client)
 
         make_chan.assert_called_once_with(
             mock.sentinel.credentials,
             DEFAULT_USER_AGENT,
-            'datastore.googleapis.com:443')
+            "datastore.googleapis.com:443",
+        )
         mock_klass.assert_called_once_with(
-            channel=mock.sentinel.channel,
-            client_info=mock.ANY,
+            channel=mock.sentinel.channel, client_info=mock.ANY
         )
 
     @mock.patch(
-        'google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient',
-        return_value=mock.sentinel.ds_client)
-    @mock.patch('google.cloud.datastore._gapic.insecure_channel',
-                return_value=mock.sentinel.channel)
+        "google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient",
+        return_value=mock.sentinel.ds_client,
+    )
+    @mock.patch(
+        "google.cloud.datastore._gapic.insecure_channel",
+        return_value=mock.sentinel.channel,
+    )
     def test_emulator(self, make_chan, mock_klass):
-        host = 'localhost:8901'
-        base_url = 'http://' + host
+        host = "localhost:8901"
+        base_url = "http://" + host
         client = mock.Mock(
             _base_url=base_url,
             _credentials=mock.sentinel.credentials,
-            spec=['_base_url', '_credentials'])
+            spec=["_base_url", "_credentials"],
+        )
         ds_api = self._call_fut(client)
         self.assertIs(ds_api, mock.sentinel.ds_client)
 
         make_chan.assert_called_once_with(host)
         mock_klass.assert_called_once_with(
-            channel=mock.sentinel.channel,
-            client_info=mock.ANY,
+            channel=mock.sentinel.channel, client_info=mock.ANY
         )
diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py
index 9610e966d419..b402eafc9532 100644
--- a/packages/google-cloud-datastore/tests/unit/test__http.py
+++ b/packages/google-cloud-datastore/tests/unit/test__http.py
@@ -21,7 +21,6 @@
 
 
 class Test__request(unittest.TestCase):
-
     @staticmethod
     def _call_fut(*args, **kwargs):
         from google.cloud.datastore._http import _request
@@ -32,11 +31,11 @@ def test_success(self):
         from google.cloud import _http as connection_module
         from google.cloud.datastore._http import _CLIENT_INFO
 
-        project = 'PROJECT'
-        method = 'METHOD'
-        data = b'DATA'
-        base_url = 'http://api-url'
-        response_data = 'CONTENT'
+        project = "PROJECT"
+        method = "METHOD"
+        data = b"DATA"
+        base_url = "http://api-url"
+        response_data = "CONTENT"
 
         http = _make_requests_session([_make_response(content=response_data)])
 
@@ -47,43 +46,40 @@ def test_success(self):
 
         # Check that the mocks were called as expected.
         expected_url = _build_expected_url(base_url, project, method)
         expected_headers = {
-            'Content-Type': 'application/x-protobuf',
-            'User-Agent': connection_module.DEFAULT_USER_AGENT,
+            "Content-Type": "application/x-protobuf",
+            "User-Agent": connection_module.DEFAULT_USER_AGENT,
             connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO,
         }
         http.request.assert_called_once_with(
-            method='POST', url=expected_url, headers=expected_headers,
-            data=data)
+            method="POST", url=expected_url, headers=expected_headers, data=data
+        )
 
     def test_failure(self):
         from google.cloud.exceptions import BadRequest
         from google.rpc import code_pb2
         from google.rpc import status_pb2
 
-        project = 'PROJECT'
-        method = 'METHOD'
-        data = 'DATA'
-        uri = 'http://api-url'
+        project = "PROJECT"
+        method = "METHOD"
+        data = "DATA"
+        uri = "http://api-url"
 
         error = status_pb2.Status()
-        error.message = 'Entity value is indexed.'
+        error.message = "Entity value is indexed."
         error.code = code_pb2.FAILED_PRECONDITION
 
-        http = _make_requests_session([
-            _make_response(
-                http_client.BAD_REQUEST,
-                content=error.SerializeToString())
-        ])
+        http = _make_requests_session(
+            [_make_response(http_client.BAD_REQUEST, content=error.SerializeToString())]
+        )
 
         with self.assertRaises(BadRequest) as exc:
             self._call_fut(http, project, method, data, uri)
 
-        expected_message = '400 Entity value is indexed.'
+        expected_message = "400 Entity value is indexed."
         self.assertEqual(str(exc.exception), expected_message)
 
 
 class Test__rpc(unittest.TestCase):
-
     @staticmethod
     def _call_fut(*args, **kwargs):
         from google.cloud.datastore._http import _rpc
@@ -94,29 +90,33 @@ def test_it(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
         http = object()
-        project = 'projectOK'
-        method = 'beginTransaction'
-        base_url = 'test.invalid'
-        request_pb = datastore_pb2.BeginTransactionRequest(
-            project_id=project)
-
-        response_pb = datastore_pb2.BeginTransactionResponse(
-            transaction=b'7830rmc')
-        patch = mock.patch('google.cloud.datastore._http._request',
                            return_value=response_pb.SerializeToString())
+        project = "projectOK"
+        method = "beginTransaction"
+        base_url = "test.invalid"
+        request_pb = datastore_pb2.BeginTransactionRequest(project_id=project)
+
+        response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc")
+        patch = mock.patch(
+            "google.cloud.datastore._http._request",
+            return_value=response_pb.SerializeToString(),
+        )
         with patch as mock_request:
             result = self._call_fut(
-                http, project, method, base_url,
-                request_pb, datastore_pb2.BeginTransactionResponse)
+                http,
+                project,
+                method,
+                base_url,
+                request_pb,
+                datastore_pb2.BeginTransactionResponse,
+            )
             self.assertEqual(result, response_pb)
 
         mock_request.assert_called_once_with(
-            http, project, method, request_pb.SerializeToString(),
-            base_url)
+            http, project, method, request_pb.SerializeToString(), base_url
+        )
 
 
 class TestHTTPDatastoreAPI(unittest.TestCase):
-
     @staticmethod
     def _get_target_class():
         from google.cloud.datastore._http import HTTPDatastoreAPI
@@ -130,9 +130,7 @@ def _make_one(self, *args, **kwargs):
     def _make_query_pb(kind):
         from google.cloud.datastore_v1.proto import query_pb2
 
-        return query_pb2.Query(
-            kind=[query_pb2.KindExpression(name=kind)],
-        )
+        return query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)])
 
     def test_constructor(self):
         client = object()
@@ -142,16 +140,18 @@ def test_constructor(self):
     def test_lookup_single_key_empty_response(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb = _make_key_pb(project)
         rsp_pb = datastore_pb2.LookupResponse()
         read_options = datastore_pb2.ReadOptions()
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -159,30 +159,32 @@ def test_lookup_single_key_empty_response(self):
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'lookup')
+        uri = _build_expected_url(client._base_url, project, "lookup")
 
         self.assertEqual(len(response.found), 0)
         self.assertEqual(len(response.missing), 0)
         self.assertEqual(len(response.deferred), 0)
 
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest())
+        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
         self.assertEqual(list(request.keys), [key_pb])
         self.assertEqual(request.read_options, read_options)
 
     def test_lookup_single_key_empty_response_w_eventual(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb = _make_key_pb(project)
         rsp_pb = datastore_pb2.LookupResponse()
         read_options = datastore_pb2.ReadOptions(
-            read_consistency=datastore_pb2.ReadOptions.EVENTUAL)
+            read_consistency=datastore_pb2.ReadOptions.EVENTUAL
+        )
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -190,30 +192,31 @@ def test_lookup_single_key_empty_response_w_eventual(self):
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'lookup')
+        uri = _build_expected_url(client._base_url, project, "lookup")
 
         self.assertEqual(len(response.found), 0)
         self.assertEqual(len(response.missing), 0)
         self.assertEqual(len(response.deferred), 0)
 
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest())
+        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
         self.assertEqual(list(request.keys), [key_pb])
         self.assertEqual(request.read_options, read_options)
 
     def test_lookup_single_key_empty_response_w_transaction(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
-        transaction = b'TRANSACTION'
+        project = "PROJECT"
+        transaction = b"TRANSACTION"
         key_pb = _make_key_pb(project)
         rsp_pb = datastore_pb2.LookupResponse()
         read_options = datastore_pb2.ReadOptions(transaction=transaction)
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -221,13 +224,12 @@ def test_lookup_single_key_empty_response_w_transaction(self):
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'lookup')
+        uri = _build_expected_url(client._base_url, project, "lookup")
 
         self.assertEqual(len(response.found), 0)
         self.assertEqual(len(response.missing), 0)
         self.assertEqual(len(response.deferred), 0)
 
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest())
+        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
         self.assertEqual(list(request.keys), [key_pb])
         self.assertEqual(request.read_options, read_options)
 
     def test_lookup_single_key_nonempty_response(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
         from google.cloud.datastore_v1.proto import entity_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb = _make_key_pb(project)
         rsp_pb = datastore_pb2.LookupResponse()
         entity = entity_pb2.Entity()
@@ -245,9 +247,11 @@ def test_lookup_single_key_nonempty_response(self):
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -255,23 +259,22 @@ def test_lookup_single_key_nonempty_response(self):
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'lookup')
+        uri = _build_expected_url(client._base_url, project, "lookup")
 
         self.assertEqual(len(response.found), 1)
         self.assertEqual(len(response.missing), 0)
         self.assertEqual(len(response.deferred), 0)
         found = response.found[0].entity
-        self.assertEqual(found.key.path[0].kind, 'Kind')
+        self.assertEqual(found.key.path[0].kind, "Kind")
         self.assertEqual(found.key.path[0].id, 1234)
 
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest())
+        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
         self.assertEqual(list(request.keys), [key_pb])
         self.assertEqual(request.read_options, read_options)
 
     def test_lookup_multiple_keys_empty_response(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb1 = _make_key_pb(project)
         key_pb2 = _make_key_pb(project, id_=2345)
         rsp_pb = datastore_pb2.LookupResponse()
         read_options = datastore_pb2.ReadOptions()
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
-        response = ds_api.lookup(
-            project, [key_pb1, key_pb2], read_options=read_options)
+        response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'lookup')
+        uri = _build_expected_url(client._base_url, project, "lookup")
 
         self.assertEqual(len(response.found), 0)
         self.assertEqual(len(response.missing), 0)
         self.assertEqual(len(response.deferred), 0)
 
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest())
+        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
         self.assertEqual(list(request.keys), [key_pb1, key_pb2])
         self.assertEqual(request.read_options, read_options)
 
     def test_lookup_multiple_keys_w_missing(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb1 = _make_key_pb(project)
         key_pb2 = _make_key_pb(project, id_=2345)
         rsp_pb = datastore_pb2.LookupResponse()
@@ -315,32 +318,32 @@ def test_lookup_multiple_keys_w_missing(self):
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
-        response = ds_api.lookup(
-            project, [key_pb1, key_pb2], read_options=read_options)
+        response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'lookup')
+        uri = _build_expected_url(client._base_url, project, "lookup")
 
         self.assertEqual(len(response.found), 0)
         self.assertEqual(len(response.deferred), 0)
         missing_keys = [result.entity.key for result in response.missing]
         self.assertEqual(missing_keys, [key_pb1, key_pb2])
 
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest())
+        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
         self.assertEqual(list(request.keys), [key_pb1, key_pb2])
         self.assertEqual(request.read_options, read_options)
 
     def test_lookup_multiple_keys_w_deferred(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb1 = _make_key_pb(project)
         key_pb2 = _make_key_pb(project, id_=2345)
         rsp_pb = datastore_pb2.LookupResponse()
@@ -350,24 +353,24 @@ def test_lookup_multiple_keys_w_deferred(self):
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
-        response = ds_api.lookup(
-            project, [key_pb1, key_pb2], read_options=read_options)
+        response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options)
 
         # Check the result and verify the callers.
ds_api = self._make_one(client) - response = ds_api.run_query( - project, partition_id, read_options, query=query_pb) + response = ds_api.run_query(project, partition_id, read_options, query=query_pb) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(client._base_url, project, 'runQuery') - request = _verify_protobuf_call( - http, uri, datastore_pb2.RunQueryRequest()) + uri = _build_expected_url(client._base_url, project, "runQuery") + request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) self.assertEqual(request.partition_id, partition_id) self.assertEqual(request.query, query_pb) self.assertEqual(request.read_options, read_options) @@ -458,9 +462,9 @@ def test_run_query_wo_namespace_empty_result(self): from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore_v1.proto import query_pb2 - project = 'PROJECT' - kind = 'Nonesuch' - cursor = b'\x00' + project = "PROJECT" + kind = "Nonesuch" + cursor = b"\x00" query_pb = self._make_query_pb(kind) partition_id = entity_pb2.PartitionId(project_id=project) read_options = datastore_pb2.ReadOptions() @@ -474,21 +478,21 @@ def test_run_query_wo_namespace_empty_result(self): # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())]) + [_make_response(content=rsp_pb.SerializeToString())] + ) client = mock.Mock( - _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) + _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + ) # Make request. ds_api = self._make_one(client) - response = ds_api.run_query( - project, partition_id, read_options, query=query_pb) + response = ds_api.run_query(project, partition_id, read_options, query=query_pb) # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(client._base_url, project, 'runQuery') - request = _verify_protobuf_call( - http, uri, datastore_pb2.RunQueryRequest()) + uri = _build_expected_url(client._base_url, project, "runQuery") + request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) self.assertEqual(request.partition_id, partition_id) self.assertEqual(request.query, query_pb) self.assertEqual(request.read_options, read_options) @@ -498,56 +502,57 @@ def test_run_query_w_namespace_nonempty_result(self): from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore_v1.proto import query_pb2 - project = 'PROJECT' - kind = 'Kind' - namespace = 'NS' + project = "PROJECT" + kind = "Kind" + namespace = "NS" query_pb = self._make_query_pb(kind) partition_id = entity_pb2.PartitionId( - project_id=project, namespace_id=namespace) + project_id=project, namespace_id=namespace + ) read_options = datastore_pb2.ReadOptions() rsp_pb = datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( entity_result_type=query_pb2.EntityResult.FULL, - entity_results=[ - query_pb2.EntityResult(entity=entity_pb2.Entity()), - ], + entity_results=[query_pb2.EntityResult(entity=entity_pb2.Entity())], more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS, ) ) # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())]) + [_make_response(content=rsp_pb.SerializeToString())] + ) client = mock.Mock( - _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) + _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + ) # Make request. 
         ds_api = self._make_one(client)
-        response = ds_api.run_query(
-            project, partition_id, read_options, query=query_pb)
+        response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'runQuery')
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.RunQueryRequest())
+        uri = _build_expected_url(client._base_url, project, "runQuery")
+        request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
         self.assertEqual(request.partition_id, partition_id)
         self.assertEqual(request.query, query_pb)
         self.assertEqual(request.read_options, read_options)
@@ -417,10 +421,10 @@ def test_run_query_wo_eventual_w_transaction(self):
         from google.cloud.datastore_v1.proto import entity_pb2
         from google.cloud.datastore_v1.proto import query_pb2
 
-        project = 'PROJECT'
-        kind = 'Nonesuch'
-        cursor = b'\x00'
-        transaction = b'TRANSACTION'
+        project = "PROJECT"
+        kind = "Nonesuch"
+        cursor = b"\x00"
+        transaction = b"TRANSACTION"
         query_pb = self._make_query_pb(kind)
         partition_id = entity_pb2.PartitionId(project_id=project)
         read_options = datastore_pb2.ReadOptions(transaction=transaction)
@@ -434,21 +438,21 @@ def test_run_query_wo_eventual_w_transaction(self):
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
-        response = ds_api.run_query(
-            project, partition_id, read_options, query=query_pb)
+        response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'runQuery')
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.RunQueryRequest())
+        uri = _build_expected_url(client._base_url, project, "runQuery")
+        request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
         self.assertEqual(request.partition_id, partition_id)
         self.assertEqual(request.query, query_pb)
         self.assertEqual(request.read_options, read_options)
@@ -458,9 +462,9 @@ def test_run_query_wo_namespace_empty_result(self):
         from google.cloud.datastore_v1.proto import entity_pb2
         from google.cloud.datastore_v1.proto import query_pb2
 
-        project = 'PROJECT'
-        kind = 'Nonesuch'
-        cursor = b'\x00'
+        project = "PROJECT"
+        kind = "Nonesuch"
+        cursor = b"\x00"
         query_pb = self._make_query_pb(kind)
         partition_id = entity_pb2.PartitionId(project_id=project)
         read_options = datastore_pb2.ReadOptions()
@@ -474,21 +478,21 @@ def test_run_query_wo_namespace_empty_result(self):
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
-        response = ds_api.run_query(
-            project, partition_id, read_options, query=query_pb)
+        response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'runQuery')
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.RunQueryRequest())
+        uri = _build_expected_url(client._base_url, project, "runQuery")
+        request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
         self.assertEqual(request.partition_id, partition_id)
         self.assertEqual(request.query, query_pb)
         self.assertEqual(request.read_options, read_options)
@@ -498,56 +502,57 @@ def test_run_query_w_namespace_nonempty_result(self):
         from google.cloud.datastore_v1.proto import entity_pb2
         from google.cloud.datastore_v1.proto import query_pb2
 
-        project = 'PROJECT'
-        kind = 'Kind'
-        namespace = 'NS'
+        project = "PROJECT"
+        kind = "Kind"
+        namespace = "NS"
         query_pb = self._make_query_pb(kind)
         partition_id = entity_pb2.PartitionId(
-            project_id=project, namespace_id=namespace)
+            project_id=project, namespace_id=namespace
+        )
         read_options = datastore_pb2.ReadOptions()
         rsp_pb = datastore_pb2.RunQueryResponse(
             batch=query_pb2.QueryResultBatch(
                 entity_result_type=query_pb2.EntityResult.FULL,
-                entity_results=[
-                    query_pb2.EntityResult(entity=entity_pb2.Entity()),
-                ],
+                entity_results=[query_pb2.EntityResult(entity=entity_pb2.Entity())],
                 more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
             )
         )
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
self.assertEqual(result, rsp_pb) - uri = _build_expected_url(client._base_url, project, 'commit') + uri = _build_expected_url(client._base_url, project, "commit") request = _verify_protobuf_call(http, uri, rq_class()) - self.assertEqual(request.transaction, b'') + self.assertEqual(request.transaction, b"") self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) @@ -602,49 +609,53 @@ def test_commit_w_transaction(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb - project = 'PROJECT' + project = "PROJECT" key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() mutation = req_pb.mutations.add() insert = mutation.upsert insert.key.CopyFrom(key_pb) - value_pb = _new_value_pb(insert, 'foo') - value_pb.string_value = u'Foo' + value_pb = _new_value_pb(insert, "foo") + value_pb.string_value = u"Foo" # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())]) + [_make_response(content=rsp_pb.SerializeToString())] + ) client = mock.Mock( - _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) + _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + ) # Make request. rq_class = datastore_pb2.CommitRequest ds_api = self._make_one(client) mode = rq_class.TRANSACTIONAL - result = ds_api.commit(project, mode, [mutation], transaction=b'xact') + result = ds_api.commit(project, mode, [mutation], transaction=b"xact") # Check the result and verify the callers. self.assertEqual(result, rsp_pb) - uri = _build_expected_url(client._base_url, project, 'commit') + uri = _build_expected_url(client._base_url, project, "commit") request = _verify_protobuf_call(http, uri, rq_class()) - self.assertEqual(request.transaction, b'xact') + self.assertEqual(request.transaction, b"xact") self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.TRANSACTIONAL) def test_rollback_ok(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' - transaction = b'xact' + project = "PROJECT" + transaction = b"xact" rsp_pb = datastore_pb2.RollbackResponse() # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())]) + [_make_response(content=rsp_pb.SerializeToString())] + ) client = mock.Mock( - _http=http, _base_url='test.invalid', spec=['_http', '_base_url']) + _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + ) # Make request. ds_api = self._make_one(client) @@ -653,22 +664,23 @@ def test_rollback_ok(self): # Check the result and verify the callers. self.assertEqual(response, rsp_pb) - uri = _build_expected_url(client._base_url, project, 'rollback') - request = _verify_protobuf_call( - http, uri, datastore_pb2.RollbackRequest()) + uri = _build_expected_url(client._base_url, project, "rollback") + request = _verify_protobuf_call(http, uri, datastore_pb2.RollbackRequest()) self.assertEqual(request.transaction, transaction) def test_allocate_ids_empty(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' + project = "PROJECT" rsp_pb = datastore_pb2.AllocateIdsResponse() # Create mock HTTP and client with response. 
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         rq_class = datastore_pb2.CommitRequest
@@ -592,9 +599,9 @@ def test_commit_wo_transaction(self):
 
         # Check the result and verify the callers.
         self.assertEqual(result, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'commit')
+        uri = _build_expected_url(client._base_url, project, "commit")
         request = _verify_protobuf_call(http, uri, rq_class())
-        self.assertEqual(request.transaction, b'')
+        self.assertEqual(request.transaction, b"")
         self.assertEqual(list(request.mutations), [mutation])
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
 
@@ -602,49 +609,53 @@ def test_commit_w_transaction(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
         from google.cloud.datastore.helpers import _new_value_pb
 
-        project = 'PROJECT'
+        project = "PROJECT"
         key_pb = _make_key_pb(project)
         rsp_pb = datastore_pb2.CommitResponse()
         req_pb = datastore_pb2.CommitRequest()
         mutation = req_pb.mutations.add()
         insert = mutation.upsert
         insert.key.CopyFrom(key_pb)
-        value_pb = _new_value_pb(insert, 'foo')
-        value_pb.string_value = u'Foo'
+        value_pb = _new_value_pb(insert, "foo")
+        value_pb.string_value = u"Foo"
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         rq_class = datastore_pb2.CommitRequest
         ds_api = self._make_one(client)
         mode = rq_class.TRANSACTIONAL
-        result = ds_api.commit(project, mode, [mutation], transaction=b'xact')
+        result = ds_api.commit(project, mode, [mutation], transaction=b"xact")
 
         # Check the result and verify the callers.
         self.assertEqual(result, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'commit')
+        uri = _build_expected_url(client._base_url, project, "commit")
         request = _verify_protobuf_call(http, uri, rq_class())
-        self.assertEqual(request.transaction, b'xact')
+        self.assertEqual(request.transaction, b"xact")
         self.assertEqual(list(request.mutations), [mutation])
         self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
 
     def test_rollback_ok(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
-        transaction = b'xact'
+        project = "PROJECT"
+        transaction = b"xact"
         rsp_pb = datastore_pb2.RollbackResponse()
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -653,22 +664,23 @@ def test_rollback_ok(self):
 
         # Check the result and verify the callers.
         self.assertEqual(response, rsp_pb)
-        uri = _build_expected_url(client._base_url, project, 'rollback')
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.RollbackRequest())
+        uri = _build_expected_url(client._base_url, project, "rollback")
+        request = _verify_protobuf_call(http, uri, datastore_pb2.RollbackRequest())
         self.assertEqual(request.transaction, transaction)
 
     def test_allocate_ids_empty(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         rsp_pb = datastore_pb2.AllocateIdsResponse()
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -678,32 +690,30 @@ def test_allocate_ids_empty(self):
         self.assertEqual(response, rsp_pb)
         self.assertEqual(list(response.keys), [])
 
-        uri = _build_expected_url(client._base_url, project, 'allocateIds')
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.AllocateIdsRequest())
+        uri = _build_expected_url(client._base_url, project, "allocateIds")
+        request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest())
         self.assertEqual(list(request.keys), [])
 
     def test_allocate_ids_non_empty(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         before_key_pbs = [
             _make_key_pb(project, id_=None),
             _make_key_pb(project, id_=None),
         ]
-        after_key_pbs = [
-            _make_key_pb(project),
-            _make_key_pb(project, id_=2345),
-        ]
+        after_key_pbs = [_make_key_pb(project), _make_key_pb(project, id_=2345)]
         rsp_pb = datastore_pb2.AllocateIdsResponse()
         rsp_pb.keys.add().CopyFrom(after_key_pbs[0])
         rsp_pb.keys.add().CopyFrom(after_key_pbs[1])
 
         # Create mock HTTP and client with response.
         http = _make_requests_session(
-            [_make_response(content=rsp_pb.SerializeToString())])
+            [_make_response(content=rsp_pb.SerializeToString())]
+        )
         client = mock.Mock(
-            _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])
+            _http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
+        )
 
         # Make request.
         ds_api = self._make_one(client)
@@ -713,15 +723,14 @@ def test_allocate_ids_non_empty(self):
         self.assertEqual(list(response.keys), after_key_pbs)
         self.assertEqual(response, rsp_pb)
 
-        uri = _build_expected_url(client._base_url, project, 'allocateIds')
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.AllocateIdsRequest())
+        uri = _build_expected_url(client._base_url, project, "allocateIds")
+        request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest())
         self.assertEqual(len(request.keys), len(before_key_pbs))
         for key_before, key_after in zip(before_key_pbs, request.keys):
             self.assertEqual(key_before, key_after)
 
 
-def _make_response(status=http_client.OK, content=b'', headers={}):
+def _make_response(status=http_client.OK, content=b"", headers={}):
     response = requests.Response()
     response.status_code = status
     response._content = content
@@ -739,18 +748,13 @@ def _make_requests_session(responses):
 def _build_expected_url(api_base_url, project, method):
     from google.cloud.datastore._http import API_VERSION
 
-    return '/'.join([
-        api_base_url,
-        API_VERSION,
-        'projects',
-        project + ':' + method,
-    ])
+    return "/".join([api_base_url, API_VERSION, "projects", project + ":" + method])
 
 
 def _make_key_pb(project, id_=1234):
     from google.cloud.datastore.key import Key
 
-    path_args = ('Kind',)
+    path_args = ("Kind",)
     if id_ is not None:
         path_args += (id_,)
     return Key(*path_args, project=project).to_protobuf()
@@ -761,15 +765,15 @@ def _verify_protobuf_call(http, expected_url, pb):
     from google.cloud.datastore._http import _CLIENT_INFO
 
     expected_headers = {
-        'Content-Type': 'application/x-protobuf',
-        'User-Agent': connection_module.DEFAULT_USER_AGENT,
+        "Content-Type": "application/x-protobuf",
+        "User-Agent": connection_module.DEFAULT_USER_AGENT,
         connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO,
     }
     http.request.assert_called_once_with(
-        method='POST', url=expected_url, headers=expected_headers,
-        data=mock.ANY)
+        method="POST", url=expected_url, headers=expected_headers, data=mock.ANY
+    )
 
-    data = http.request.mock_calls[0][2]['data']
+    data = http.request.mock_calls[0][2]["data"]
     pb.ParseFromString(data)
     return pb
diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py
index 4b94b657e9d0..8516e78c0c7a 100644
--- a/packages/google-cloud-datastore/tests/unit/test_batch.py
+++ b/packages/google-cloud-datastore/tests/unit/test_batch.py
@@ -18,7 +18,6 @@
 
 
 class TestBatch(unittest.TestCase):
-
     @staticmethod
     def _get_target_class():
         from google.cloud.datastore.batch import Batch
@@ -29,8 +28,8 @@ def _make_one(self, client):
         return self._get_target_class()(client)
 
     def test_ctor(self):
-        project = 'PROJECT'
-        namespace = 'NAMESPACE'
+        project = "PROJECT"
+        namespace = "NAMESPACE"
         client = _Client(project, namespace=namespace)
         batch = self._make_one(client)
 
@@ -45,7 +44,7 @@ def test_ctor(self):
     def test_current(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch1 = self._make_one(client)
         batch2 = self._make_one(client)
@@ -68,7 +67,7 @@ def test_current(self):
         commit_method.assert_called_with(project, mode, [], transaction=None)
 
     def test_put_entity_wo_key(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
 
@@ -76,28 +75,28 @@ def test_put_entity_wo_key(self):
         self.assertRaises(ValueError, batch.put, _Entity())
 
     def test_put_entity_wrong_status(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
         entity = _Entity()
-        entity.key = _Key('OTHER')
+        entity.key = _Key("OTHER")
 
         self.assertEqual(batch._status, batch._INITIAL)
         self.assertRaises(ValueError, batch.put, entity)
 
     def test_put_entity_w_key_wrong_project(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
         entity = _Entity()
-        entity.key = _Key('OTHER')
+        entity.key = _Key("OTHER")
 
         batch.begin()
         self.assertRaises(ValueError, batch.put, entity)
 
     def test_put_entity_w_partial_key(self):
-        project = 'PROJECT'
-        properties = {'foo': 'bar'}
+        project = "PROJECT"
+        properties = {"foo": "bar"}
         client = _Client(project)
         batch = self._make_one(client)
         entity = _Entity(properties)
@@ -107,45 +106,40 @@ def test_put_entity_w_partial_key(self):
         batch.begin()
         batch.put(entity)
 
-        mutated_entity = _mutated_pb(self, batch.mutations, 'insert')
+        mutated_entity = _mutated_pb(self, batch.mutations, "insert")
         self.assertEqual(mutated_entity.key, key._key)
         self.assertEqual(batch._partial_key_entities, [entity])
 
     def test_put_entity_w_completed_key(self):
         from google.cloud.datastore.helpers import _property_tuples
 
-        project = 'PROJECT'
-        properties = {
-            'foo': 'bar',
-            'baz': 'qux',
-            'spam': [1, 2, 3],
-            'frotz': [],
-        }
+        project = "PROJECT"
+        properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []}
         client = _Client(project)
         batch = self._make_one(client)
         entity = _Entity(properties)
-        entity.exclude_from_indexes = ('baz', 'spam')
+        entity.exclude_from_indexes = ("baz", "spam")
         key = entity.key = _Key(project)
 
         batch.begin()
         batch.put(entity)
 
-        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
+        mutated_entity = _mutated_pb(self, batch.mutations, "upsert")
         self.assertEqual(mutated_entity.key, key._key)
 
         prop_dict = dict(_property_tuples(mutated_entity))
         self.assertEqual(len(prop_dict), 4)
-        self.assertFalse(prop_dict['foo'].exclude_from_indexes)
-        self.assertTrue(prop_dict['baz'].exclude_from_indexes)
-        self.assertFalse(prop_dict['spam'].exclude_from_indexes)
-        spam_values = prop_dict['spam'].array_value.values
+        self.assertFalse(prop_dict["foo"].exclude_from_indexes)
+        self.assertTrue(prop_dict["baz"].exclude_from_indexes)
+        self.assertFalse(prop_dict["spam"].exclude_from_indexes)
+        spam_values = prop_dict["spam"].array_value.values
         self.assertTrue(spam_values[0].exclude_from_indexes)
         self.assertTrue(spam_values[1].exclude_from_indexes)
         self.assertTrue(spam_values[2].exclude_from_indexes)
-        self.assertTrue('frotz' in prop_dict)
+        self.assertTrue("frotz" in prop_dict)
 
     def test_delete_wrong_status(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
         key = _Key(project)
@@ -155,7 +149,7 @@ def test_delete_wrong_status(self):
         self.assertRaises(ValueError, batch.delete, key)
 
     def test_delete_w_partial_key(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
         key = _Key(project)
@@ -165,16 +159,16 @@ def test_delete_w_partial_key(self):
         self.assertRaises(ValueError, batch.delete, key)
 
     def test_delete_w_key_wrong_project(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
-        key = _Key('OTHER')
+        key = _Key("OTHER")
 
         batch.begin()
         self.assertRaises(ValueError, batch.delete, key)
 
     def test_delete_w_completed_key(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
         key = _Key(project)
@@ -182,11 +176,11 @@ def test_delete_w_completed_key(self):
         batch.begin()
         batch.delete(key)
 
-        mutated_key = _mutated_pb(self, batch.mutations, 'delete')
+        mutated_key = _mutated_pb(self, batch.mutations, "delete")
         self.assertEqual(mutated_key, key._key)
 
     def test_begin(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project, None)
         batch = self._make_one(client)
         self.assertEqual(batch._status, batch._INITIAL)
@@ -194,7 +188,7 @@ def test_begin(self):
         self.assertEqual(batch._status, batch._IN_PROGRESS)
 
     def test_begin_fail(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project, None)
         batch = self._make_one(client)
         batch._status = batch._IN_PROGRESS
@@ -202,7 +196,7 @@ def test_begin_fail(self):
             batch.begin()
 
     def test_rollback(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project, None)
         batch = self._make_one(client)
         batch.begin()
@@ -211,7 +205,7 @@ def test_rollback(self):
         self.assertEqual(batch._status, batch._ABORTED)
 
     def test_rollback_wrong_status(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project, None)
         batch = self._make_one(client)
 
@@ -221,7 +215,7 @@ def test_rollback_wrong_status(self):
     def test_commit(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
 
@@ -236,7 +230,7 @@ def test_commit(self):
         commit_method.assert_called_with(project, mode, [], transaction=None)
 
     def test_commit_wrong_status(self):
-        project = 'PROJECT'
+        project = "PROJECT"
         client = _Client(project)
         batch = self._make_one(client)
 
@@ -246,7 +240,7 @@ def test_commit_wrong_status(self):
     def test_commit_w_partial_key_entities(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        project = 
"PROJECT" new_id = 1234 ds_api = _make_datastore_api(new_id) client = _Client(project, datastore_api=ds_api) @@ -263,16 +257,15 @@ def test_commit_w_partial_key_entities(self): self.assertEqual(batch._status, batch._FINISHED) mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL - ds_api.commit.assert_called_once_with( - project, mode, [], transaction=None) + ds_api.commit.assert_called_once_with(project, mode, [], transaction=None) self.assertFalse(entity.key.is_partial) self.assertEqual(entity.key._id, new_id) def test_as_context_mgr_wo_error(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' - properties = {'foo': 'bar'} + project = "PROJECT" + properties = {"foo": "bar"} entity = _Entity(properties) key = entity.key = _Key(project) @@ -285,18 +278,19 @@ def test_as_context_mgr_wo_error(self): self.assertEqual(list(client._batches), []) - mutated_entity = _mutated_pb(self, batch.mutations, 'upsert') + mutated_entity = _mutated_pb(self, batch.mutations, "upsert") self.assertEqual(mutated_entity.key, key._key) commit_method = client._datastore_api.commit mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL commit_method.assert_called_with( - project, mode, batch.mutations, transaction=None) + project, mode, batch.mutations, transaction=None + ) def test_as_context_mgr_nested(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' - properties = {'foo': 'bar'} + project = "PROJECT" + properties = {"foo": "bar"} entity1 = _Entity(properties) key1 = entity1.key = _Key(project) entity2 = _Entity(properties) @@ -316,23 +310,25 @@ def test_as_context_mgr_nested(self): self.assertEqual(list(client._batches), []) - mutated_entity1 = _mutated_pb(self, batch1.mutations, 'upsert') + mutated_entity1 = _mutated_pb(self, batch1.mutations, "upsert") self.assertEqual(mutated_entity1.key, key1._key) - mutated_entity2 = _mutated_pb(self, batch2.mutations, 'upsert') + mutated_entity2 = _mutated_pb(self, batch2.mutations, "upsert") self.assertEqual(mutated_entity2.key, key2._key) commit_method = client._datastore_api.commit self.assertEqual(commit_method.call_count, 2) mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL commit_method.assert_called_with( - project, mode, batch1.mutations, transaction=None) + project, mode, batch1.mutations, transaction=None + ) commit_method.assert_called_with( - project, mode, batch2.mutations, transaction=None) + project, mode, batch2.mutations, transaction=None + ) def test_as_context_mgr_w_error(self): - project = 'PROJECT' - properties = {'foo': 'bar'} + project = "PROJECT" + properties = {"foo": "bar"} entity = _Entity(properties) key = entity.key = _Key(project) @@ -349,14 +345,13 @@ def test_as_context_mgr_w_error(self): self.assertEqual(list(client._batches), []) - mutated_entity = _mutated_pb(self, batch.mutations, 'upsert') + mutated_entity = _mutated_pb(self, batch.mutations, "upsert") self.assertEqual(mutated_entity.key, key._key) def test_as_context_mgr_enter_fails(self): klass = self._get_target_class() class FailedBegin(klass): - def begin(self): raise RuntimeError @@ -374,7 +369,6 @@ def begin(self): class Test__parse_commit_response(unittest.TestCase): - def _call_fut(self, commit_response_pb): from google.cloud.datastore.batch import _parse_commit_response @@ -386,27 +380,11 @@ def test_it(self): index_updates = 1337 keys = [ - entity_pb2.Key( - path=[ - entity_pb2.Key.PathElement( - kind='Foo', - id=1234, - ), - ], - ), - entity_pb2.Key( - path=[ - entity_pb2.Key.PathElement( - kind='Bar', 
- name='baz', - ), - ], - ), + entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Foo", id=1234)]), + entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Bar", name="baz")]), ] response = datastore_pb2.CommitResponse( - mutation_results=[ - datastore_pb2.MutationResult(key=key) for key in keys - ], + mutation_results=[datastore_pb2.MutationResult(key=key) for key in keys], index_updates=index_updates, ) result = self._call_fut(response) @@ -420,8 +398,8 @@ class _Entity(dict): class _Key(object): - _kind = 'KIND' - _key = 'KEY' + _kind = "KIND" + _key = "KEY" _path = None _id = 1234 _stored = None @@ -455,7 +433,6 @@ def completed_key(self, new_id): class _Client(object): - def __init__(self, project, datastore_api=None, namespace=None): self.project = project if datastore_api is None: @@ -487,8 +464,7 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type): # We grab the only mutation. mutated_pb = mutation_pb_list[0] # Then check if it is the correct type. - test_case.assertEqual(mutated_pb.WhichOneof('operation'), - mutation_type) + test_case.assertEqual(mutated_pb.WhichOneof("operation"), mutation_type) return getattr(mutated_pb, mutation_type) @@ -498,9 +474,9 @@ def _make_mutation(id_): from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() - key.partition_id.project_id = 'PROJECT' + key.partition_id.project_id = "PROJECT" elem = key.path.add() - elem.kind = 'Kind' + elem.kind = "Kind" elem.id = id_ return datastore_pb2.MutationResult(key=key) @@ -508,12 +484,10 @@ def _make_mutation(id_): def _make_commit_response(*new_key_ids): from google.cloud.datastore_v1.proto import datastore_pb2 - mutation_results = [ - _make_mutation(key_id) for key_id in new_key_ids] + mutation_results = [_make_mutation(key_id) for key_id in new_key_ids] return datastore_pb2.CommitResponse(mutation_results=mutation_results) def _make_datastore_api(*new_key_ids): - commit_method = mock.Mock( - return_value=_make_commit_response(*new_key_ids), spec=[]) - return mock.Mock(commit=commit_method, spec=['commit']) + commit_method = mock.Mock(return_value=_make_commit_response(*new_key_ids), spec=[]) + return mock.Mock(commit=commit_method, spec=["commit"]) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 03c527b5e2be..10294db3cf62 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -40,7 +40,6 @@ def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): class Test__get_gcd_project(unittest.TestCase): - def _call_fut(self): from google.cloud.datastore.client import _get_gcd_project @@ -48,7 +47,7 @@ def _call_fut(self): def test_no_value(self): environ = {} - with mock.patch('os.getenv', new=environ.get): + with mock.patch("os.getenv", new=environ.get): project = self._call_fut() self.assertIsNone(project) @@ -57,35 +56,33 @@ def test_value_set(self): MOCK_PROJECT = object() environ = {GCD_DATASET: MOCK_PROJECT} - with mock.patch('os.getenv', new=environ.get): + with mock.patch("os.getenv", new=environ.get): project = self._call_fut() self.assertEqual(project, MOCK_PROJECT) class Test__determine_default_project(unittest.TestCase): - def _call_fut(self, project=None): - from google.cloud.datastore.client import ( - _determine_default_project) + from google.cloud.datastore.client import _determine_default_project return _determine_default_project(project=project) - def 
_determine_default_helper(self, gcd=None, fallback=None, - project_called=None): + def _determine_default_helper(self, gcd=None, fallback=None, project_called=None): _callers = [] def gcd_mock(): - _callers.append('gcd_mock') + _callers.append("gcd_mock") return gcd def fallback_mock(project=None): - _callers.append(('fallback_mock', project)) + _callers.append(("fallback_mock", project)) return fallback patch = mock.patch.multiple( - 'google.cloud.datastore.client', + "google.cloud.datastore.client", _get_gcd_project=gcd_mock, - _base_default_project=fallback_mock) + _base_default_project=fallback_mock, + ) with patch: returned_project = self._call_fut(project_called) @@ -94,12 +91,11 @@ def fallback_mock(project=None): def test_no_value(self): project, callers = self._determine_default_helper() self.assertIsNone(project) - self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)]) + self.assertEqual(callers, ["gcd_mock", ("fallback_mock", None)]) def test_explicit(self): PROJECT = object() - project, callers = self._determine_default_helper( - project_called=PROJECT) + project, callers = self._determine_default_helper(project_called=PROJECT) self.assertEqual(project, PROJECT) self.assertEqual(callers, []) @@ -107,18 +103,18 @@ def test_gcd(self): PROJECT = object() project, callers = self._determine_default_helper(gcd=PROJECT) self.assertEqual(project, PROJECT) - self.assertEqual(callers, ['gcd_mock']) + self.assertEqual(callers, ["gcd_mock"]) def test_fallback(self): PROJECT = object() project, callers = self._determine_default_helper(fallback=PROJECT) self.assertEqual(project, PROJECT) - self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)]) + self.assertEqual(callers, ["gcd_mock", ("fallback_mock", None)]) class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" @staticmethod def _get_target_class(): @@ -126,35 +122,43 @@ def _get_target_class(): return Client - def _make_one(self, project=PROJECT, namespace=None, - credentials=None, _http=None, _use_grpc=None): - return self._get_target_class()(project=project, - namespace=namespace, - credentials=credentials, - _http=_http, - _use_grpc=_use_grpc) + def _make_one( + self, + project=PROJECT, + namespace=None, + credentials=None, + _http=None, + _use_grpc=None, + ): + return self._get_target_class()( + project=project, + namespace=namespace, + credentials=credentials, + _http=_http, + _use_grpc=_use_grpc, + ) def test_constructor_w_project_no_environ(self): # Some environments (e.g. AppVeyor CI) run in GCE, so # this test would fail artificially. 
patch = mock.patch( - 'google.cloud.datastore.client._base_default_project', - return_value=None) + "google.cloud.datastore.client._base_default_project", return_value=None + ) with patch: self.assertRaises(EnvironmentError, self._make_one, None) def test_constructor_w_implicit_inputs(self): from google.cloud.datastore.client import _DATASTORE_BASE_URL - other = 'other' + other = "other" creds = _make_credentials() klass = self._get_target_class() patch1 = mock.patch( - 'google.cloud.datastore.client._determine_default_project', - return_value=other) - patch2 = mock.patch( - 'google.auth.default', return_value=(creds, None)) + "google.cloud.datastore.client._determine_default_project", + return_value=other, + ) + patch2 = mock.patch("google.auth.default", return_value=(creds, None)) with patch1 as _determine_default_project: with patch2 as default: @@ -175,14 +179,13 @@ def test_constructor_w_implicit_inputs(self): def test_constructor_w_explicit_inputs(self): from google.cloud.datastore.client import _DATASTORE_BASE_URL - other = 'other' - namespace = 'namespace' + other = "other" + namespace = "namespace" creds = _make_credentials() http = object() - client = self._make_one(project=other, - namespace=namespace, - credentials=creds, - _http=http) + client = self._make_one( + project=other, namespace=namespace, credentials=creds, _http=http + ) self.assertEqual(client.project, other) self.assertEqual(client.namespace, namespace) self.assertIs(client._credentials, creds) @@ -194,73 +197,71 @@ def test_constructor_w_explicit_inputs(self): def test_constructor_use_grpc_default(self): import google.cloud.datastore.client as MUT - project = 'PROJECT' + project = "PROJECT" creds = _make_credentials() http = object() - with mock.patch.object(MUT, '_USE_GRPC', new=True): - client1 = self._make_one( - project=project, credentials=creds, _http=http) + with mock.patch.object(MUT, "_USE_GRPC", new=True): + client1 = self._make_one(project=project, credentials=creds, _http=http) self.assertTrue(client1._use_grpc) # Explicitly over-ride the environment. client2 = self._make_one( - project=project, credentials=creds, _http=http, - _use_grpc=False) + project=project, credentials=creds, _http=http, _use_grpc=False + ) self.assertFalse(client2._use_grpc) - with mock.patch.object(MUT, '_USE_GRPC', new=False): - client3 = self._make_one( - project=project, credentials=creds, _http=http) + with mock.patch.object(MUT, "_USE_GRPC", new=False): + client3 = self._make_one(project=project, credentials=creds, _http=http) self.assertFalse(client3._use_grpc) # Explicitly over-ride the environment. 
client4 = self._make_one( - project=project, credentials=creds, _http=http, - _use_grpc=True) + project=project, credentials=creds, _http=http, _use_grpc=True + ) self.assertTrue(client4._use_grpc) def test_constructor_gcd_host(self): from google.cloud.environment_vars import GCD_HOST - host = 'localhost:1234' + host = "localhost:1234" fake_environ = {GCD_HOST: host} - project = 'PROJECT' + project = "PROJECT" creds = _make_credentials() http = object() - with mock.patch('os.environ', new=fake_environ): - client = self._make_one( - project=project, credentials=creds, _http=http) - self.assertEqual(client.base_url, 'http://' + host) + with mock.patch("os.environ", new=fake_environ): + client = self._make_one(project=project, credentials=creds, _http=http) + self.assertEqual(client.base_url, "http://" + host) def test__datastore_api_property_gapic(self): client = self._make_one( - project='prahj-ekt', credentials=_make_credentials(), - _http=object(), _use_grpc=True) + project="prahj-ekt", + credentials=_make_credentials(), + _http=object(), + _use_grpc=True, + ) self.assertIsNone(client._datastore_api_internal) patch = mock.patch( - 'google.cloud.datastore.client.make_datastore_api', - return_value=mock.sentinel.ds_api) + "google.cloud.datastore.client.make_datastore_api", + return_value=mock.sentinel.ds_api, + ) with patch as make_api: ds_api = client._datastore_api self.assertIs(ds_api, mock.sentinel.ds_api) make_api.assert_called_once_with(client) - self.assertIs( - client._datastore_api_internal, mock.sentinel.ds_api) + self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api) # Make sure the cached value is used. self.assertEqual(make_api.call_count, 1) - self.assertIs( - client._datastore_api, mock.sentinel.ds_api) + self.assertIs(client._datastore_api, mock.sentinel.ds_api) self.assertEqual(make_api.call_count, 1) def test_base_url_property(self): - alternate_url = 'https://alias.example.com/' - project = 'PROJECT' + alternate_url = "https://alias.example.com/" + project = "PROJECT" creds = _make_credentials() http = object() - client = self._make_one( - project=project, credentials=creds, _http=http) + client = self._make_one(project=project, credentials=creds, _http=http) client.base_url = alternate_url self.assertEqual(client.base_url, alternate_url) @@ -268,8 +269,11 @@ def test__datastore_api_property_http(self): from google.cloud.datastore._http import HTTPDatastoreAPI client = self._make_one( - project='prahj-ekt', credentials=_make_credentials(), - _http=object(), _use_grpc=False) + project="prahj-ekt", + credentials=_make_credentials(), + _http=object(), + _use_grpc=False, + ) self.assertIsNone(client._datastore_api_internal) ds_api = client._datastore_api @@ -314,13 +318,13 @@ def _get_multi(*args, **kw): self.assertIsNone(client.get(key)) self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]['keys'], [key]) - self.assertIsNone(_called_with[0][1]['missing']) - self.assertIsNone(_called_with[0][1]['deferred']) - self.assertIsNone(_called_with[0][1]['transaction']) + self.assertEqual(_called_with[0][1]["keys"], [key]) + self.assertIsNone(_called_with[0][1]["missing"]) + self.assertIsNone(_called_with[0][1]["deferred"]) + self.assertIsNone(_called_with[0][1]["transaction"]) def test_get_hit(self): - TXN_ID = '123' + TXN_ID = "123" _called_with = [] _entity = object() @@ -337,10 +341,10 @@ def _get_multi(*args, **kw): self.assertIs(client.get(key, missing, deferred, TXN_ID), _entity) self.assertEqual(_called_with[0][0], ()) - 
self.assertEqual(_called_with[0][1]['keys'], [key]) - self.assertIs(_called_with[0][1]['missing'], missing) - self.assertIs(_called_with[0][1]['deferred'], deferred) - self.assertEqual(_called_with[0][1]['transaction'], TXN_ID) + self.assertEqual(_called_with[0][1]["keys"], [key]) + self.assertIs(_called_with[0][1]["missing"], missing) + self.assertIs(_called_with[0][1]["deferred"], deferred) + self.assertEqual(_called_with[0][1]["transaction"], TXN_ID) def test_get_multi_no_keys(self): creds = _make_credentials() @@ -357,15 +361,13 @@ def test_get_multi_miss(self): ds_api = _make_datastore_api() client._datastore_api_internal = ds_api - key = Key('Kind', 1234, project=self.PROJECT) + key = Key("Kind", 1234, project=self.PROJECT) results = client.get_multi([key]) self.assertEqual(results, []) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, - [key.to_protobuf()], - read_options=read_options, + self.PROJECT, [key.to_protobuf()], read_options=read_options ) def test_get_multi_miss_w_missing(self): @@ -373,7 +375,7 @@ def test_get_multi_miss_w_missing(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key - KIND = 'Kind' + KIND = "Kind" ID = 1234 # Make a missing entity pb to be returned from mock backend. @@ -395,14 +397,11 @@ def test_get_multi_miss_w_missing(self): entities = client.get_multi([key], missing=missing) self.assertEqual(entities, []) key_pb = key.to_protobuf() - self.assertEqual( - [missed.key.to_protobuf() for missed in missing], [key_pb]) + self.assertEqual([missed.key.to_protobuf() for missed in missing], [key_pb]) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, - [key_pb], - read_options=read_options, + self.PROJECT, [key_pb], read_options=read_options ) def test_get_multi_w_missing_non_empty(self): @@ -410,28 +409,26 @@ def test_get_multi_w_missing_non_empty(self): creds = _make_credentials() client = self._make_one(credentials=creds) - key = Key('Kind', 1234, project=self.PROJECT) + key = Key("Kind", 1234, project=self.PROJECT) - missing = ['this', 'list', 'is', 'not', 'empty'] - self.assertRaises(ValueError, client.get_multi, - [key], missing=missing) + missing = ["this", "list", "is", "not", "empty"] + self.assertRaises(ValueError, client.get_multi, [key], missing=missing) def test_get_multi_w_deferred_non_empty(self): from google.cloud.datastore.key import Key creds = _make_credentials() client = self._make_one(credentials=creds) - key = Key('Kind', 1234, project=self.PROJECT) + key = Key("Kind", 1234, project=self.PROJECT) - deferred = ['this', 'list', 'is', 'not', 'empty'] - self.assertRaises(ValueError, client.get_multi, - [key], deferred=deferred) + deferred = ["this", "list", "is", "not", "empty"] + self.assertRaises(ValueError, client.get_multi, [key], deferred=deferred) def test_get_multi_miss_w_deferred(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key - key = Key('Kind', 1234, project=self.PROJECT) + key = Key("Kind", 1234, project=self.PROJECT) key_pb = key.to_protobuf() # Set deferred entity on mock connection. 
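# ---------------------------------------------------------------------------
# [Illustrative aside -- not part of the patch] The hunks that follow
# exercise deferred-key handling: the backend marks some keys "deferred" and
# get_multi must loop with a follow-up lookup. The mock pattern those tests
# lean on is an iterable side_effect, which yields one return value per
# call. A minimal standalone sketch:
import mock

lookup = mock.Mock(spec=[], side_effect=["first response", "second response"])
assert lookup() == "first response"   # initial lookup; some keys deferred
assert lookup() == "second response"  # follow-up lookup for deferred keys
# ---------------------------------------------------------------------------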
@@ -444,14 +441,11 @@ def test_get_multi_miss_w_deferred(self): deferred = [] entities = client.get_multi([key], deferred=deferred) self.assertEqual(entities, []) - self.assertEqual( - [def_key.to_protobuf() for def_key in deferred], [key_pb]) + self.assertEqual([def_key.to_protobuf() for def_key in deferred], [key_pb]) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, - [key_pb], - read_options=read_options, + self.PROJECT, [key_pb], read_options=read_options ) def test_get_multi_w_deferred_from_backend_but_not_passed(self): @@ -460,9 +454,9 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key - key1 = Key('Kind', project=self.PROJECT) + key1 = Key("Kind", project=self.PROJECT) key1_pb = key1.to_protobuf() - key2 = Key('Kind', 2345, project=self.PROJECT) + key2 = Key("Kind", 2345, project=self.PROJECT) key2_pb = key2.to_protobuf() entity1_pb = entity_pb2.Entity() @@ -475,11 +469,13 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): # Mock up two separate requests. Using an iterable as side_effect # allows multiple return values. lookup_response1 = _make_lookup_response( - results=[entity1_pb], deferred=[key2_pb]) + results=[entity1_pb], deferred=[key2_pb] + ) lookup_response2 = _make_lookup_response(results=[entity2_pb]) ds_api = _make_datastore_api() ds_api.lookup = mock.Mock( - side_effect=[lookup_response1, lookup_response2], spec=[]) + side_effect=[lookup_response1, lookup_response2], spec=[] + ) client._datastore_api_internal = ds_api missing = [] @@ -499,26 +495,22 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): self.assertEqual(ds_api.lookup.call_count, 2) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_any_call( - self.PROJECT, - [key2_pb], - read_options=read_options, + self.PROJECT, [key2_pb], read_options=read_options ) ds_api.lookup.assert_any_call( - self.PROJECT, - [key1_pb, key2_pb], - read_options=read_options, + self.PROJECT, [key1_pb, key2_pb], read_options=read_options ) def test_get_multi_hit(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key - kind = 'Kind' + kind = "Kind" id_ = 1234 - path = [{'kind': kind, 'id': id_}] + path = [{"kind": kind, "id": id_}] # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, kind, id_, 'foo', 'Foo') + entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") # Make a connection to return the entity pb. 
creds = _make_credentials() @@ -535,27 +527,25 @@ def test_get_multi_hit(self): self.assertIsNot(new_key, key) self.assertEqual(new_key.project, self.PROJECT) self.assertEqual(new_key.path, path) - self.assertEqual(list(result), ['foo']) - self.assertEqual(result['foo'], 'Foo') + self.assertEqual(list(result), ["foo"]) + self.assertEqual(result["foo"], "Foo") read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, - [key.to_protobuf()], - read_options=read_options, + self.PROJECT, [key.to_protobuf()], read_options=read_options ) def test_get_multi_hit_w_transaction(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key - txn_id = b'123' - kind = 'Kind' + txn_id = b"123" + kind = "Kind" id_ = 1234 - path = [{'kind': kind, 'id': id_}] + path = [{"kind": kind, "id": id_}] # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, kind, id_, 'foo', 'Foo') + entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") # Make a connection to return the entity pb. creds = _make_credentials() @@ -574,21 +564,19 @@ def test_get_multi_hit_w_transaction(self): self.assertIsNot(new_key, key) self.assertEqual(new_key.project, self.PROJECT) self.assertEqual(new_key.path, path) - self.assertEqual(list(result), ['foo']) - self.assertEqual(result['foo'], 'Foo') + self.assertEqual(list(result), ["foo"]) + self.assertEqual(result["foo"], "Foo") read_options = datastore_pb2.ReadOptions(transaction=txn_id) ds_api.lookup.assert_called_once_with( - self.PROJECT, - [key.to_protobuf()], - read_options=read_options, + self.PROJECT, [key.to_protobuf()], read_options=read_options ) def test_get_multi_hit_multiple_keys_same_project(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key - kind = 'Kind' + kind = "Kind" id1 = 1234 id2 = 2345 @@ -599,8 +587,7 @@ def test_get_multi_hit_multiple_keys_same_project(self): # Make a connection to return the entity pbs. creds = _make_credentials() client = self._make_one(credentials=creds) - lookup_response = _make_lookup_response( - results=[entity_pb1, entity_pb2]) + lookup_response = _make_lookup_response(results=[entity_pb1, entity_pb2]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api @@ -624,14 +611,14 @@ def test_get_multi_hit_multiple_keys_same_project(self): def test_get_multi_hit_multiple_keys_different_project(self): from google.cloud.datastore.key import Key - PROJECT1 = 'PROJECT' - PROJECT2 = 'PROJECT-ALT' + PROJECT1 = "PROJECT" + PROJECT2 = "PROJECT-ALT" # Make sure our IDs are actually different. self.assertNotEqual(PROJECT1, PROJECT2) - key1 = Key('KIND', 1234, project=PROJECT1) - key2 = Key('KIND', 1234, project=PROJECT2) + key1 = Key("KIND", 1234, project=PROJECT1) + key2 = Key("KIND", 1234, project=PROJECT2) creds = _make_credentials() client = self._make_one(credentials=creds) @@ -642,11 +629,11 @@ def test_get_multi_hit_multiple_keys_different_project(self): def test_get_multi_max_loops(self): from google.cloud.datastore.key import Key - kind = 'Kind' + kind = "Kind" id_ = 1234 # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, kind, id_, 'foo', 'Foo') + entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") # Make a connection to return the entity pb. 
creds = _make_credentials() @@ -659,11 +646,9 @@ def test_get_multi_max_loops(self): deferred = [] missing = [] - patch = mock.patch( - 'google.cloud.datastore.client._MAX_LOOPS', new=-1) + patch = mock.patch("google.cloud.datastore.client._MAX_LOOPS", new=-1) with patch: - result = client.get_multi([key], missing=missing, - deferred=deferred) + result = client.get_multi([key], missing=missing, deferred=deferred) # Make sure we have no results, even though the connection has been # set up as in `test_hit` to return a single result. @@ -686,7 +671,7 @@ def _put_multi(*args, **kw): client.put(entity) self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]['entities'], [entity]) + self.assertEqual(_called_with[0][1]["entities"], [entity]) def test_put_multi_no_entities(self): creds = _make_credentials() @@ -705,7 +690,7 @@ def test_put_multi_no_batch_w_partial_key(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _property_tuples - entity = _Entity(foo=u'bar') + entity = _Entity(foo=u"bar") key = entity.key = _Key(self.PROJECT) key._id = None @@ -720,43 +705,42 @@ def test_put_multi_no_batch_w_partial_key(self): self.assertEqual(ds_api.commit.call_count, 1) _, positional, keyword = ds_api.commit.mock_calls[0] - self.assertEqual(keyword, {'transaction': None}) + self.assertEqual(keyword, {"transaction": None}) self.assertEqual(len(positional), 3) self.assertEqual(positional[0], self.PROJECT) - self.assertEqual( - positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL) + self.assertEqual(positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL) mutations = positional[2] - mutated_entity = _mutated_pb(self, mutations, 'insert') + mutated_entity = _mutated_pb(self, mutations, "insert") self.assertEqual(mutated_entity.key, key.to_protobuf()) prop_list = list(_property_tuples(mutated_entity)) self.assertTrue(len(prop_list), 1) name, value_pb = prop_list[0] - self.assertEqual(name, 'foo') - self.assertEqual(value_pb.string_value, u'bar') + self.assertEqual(name, "foo") + self.assertEqual(value_pb.string_value, u"bar") def test_put_multi_existing_batch_w_completed_key(self): from google.cloud.datastore.helpers import _property_tuples creds = _make_credentials() client = self._make_one(credentials=creds) - entity = _Entity(foo=u'bar') + entity = _Entity(foo=u"bar") key = entity.key = _Key(self.PROJECT) with _NoCommitBatch(client) as CURR_BATCH: result = client.put_multi([entity]) self.assertIsNone(result) - mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, 'upsert') + mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, "upsert") self.assertEqual(mutated_entity.key, key.to_protobuf()) prop_list = list(_property_tuples(mutated_entity)) self.assertTrue(len(prop_list), 1) name, value_pb = prop_list[0] - self.assertEqual(name, 'foo') - self.assertEqual(value_pb.string_value, u'bar') + self.assertEqual(name, "foo") + self.assertEqual(value_pb.string_value, u"bar") def test_delete(self): _called_with = [] @@ -772,7 +756,7 @@ def _delete_multi(*args, **kw): client.delete(key) self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]['keys'], [key]) + self.assertEqual(_called_with[0][1]["keys"], [key]) def test_delete_multi_no_keys(self): creds = _make_credentials() @@ -798,15 +782,14 @@ def test_delete_multi_no_batch(self): self.assertEqual(ds_api.commit.call_count, 1) _, positional, keyword = ds_api.commit.mock_calls[0] - self.assertEqual(keyword, {'transaction': None}) + 
self.assertEqual(keyword, {"transaction": None}) self.assertEqual(len(positional), 3) self.assertEqual(positional[0], self.PROJECT) - self.assertEqual( - positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL) + self.assertEqual(positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL) mutations = positional[2] - mutated_key = _mutated_pb(self, mutations, 'delete') + mutated_key = _mutated_pb(self, mutations, "delete") self.assertEqual(mutated_key, key.to_protobuf()) def test_delete_multi_w_existing_batch(self): @@ -820,7 +803,7 @@ def test_delete_multi_w_existing_batch(self): result = client.delete_multi([key]) self.assertIsNone(result) - mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete') + mutated_key = _mutated_pb(self, CURR_BATCH.mutations, "delete") self.assertEqual(mutated_key, key._key) client._datastore_api_internal.commit.assert_not_called() @@ -835,7 +818,7 @@ def test_delete_multi_w_existing_transaction(self): result = client.delete_multi([key]) self.assertIsNone(result) - mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete') + mutated_key = _mutated_pb(self, CURR_XACT.mutations, "delete") self.assertEqual(mutated_key, key._key) client._datastore_api_internal.commit.assert_not_called() @@ -847,10 +830,9 @@ def test_allocate_ids_w_partial_key(self): creds = _make_credentials() client = self._make_one(credentials=creds, _use_grpc=False) - allocated = mock.Mock( - keys=[_KeyPB(i) for i in range(num_ids)], spec=['keys']) + allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) alloc_ids = mock.Mock(return_value=allocated, spec=[]) - ds_api = mock.Mock(allocate_ids=alloc_ids, spec=['allocate_ids']) + ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) client._datastore_api_internal = ds_api result = client.allocate_ids(incomplete_key, num_ids) @@ -866,48 +848,47 @@ def test_allocate_ids_with_completed_key(self): self.assertRaises(ValueError, client.allocate_ids, COMPLETE_KEY, 2) def test_key_w_project(self): - KIND = 'KIND' + KIND = "KIND" ID = 1234 creds = _make_credentials() client = self._make_one(credentials=creds) - self.assertRaises(TypeError, - client.key, KIND, ID, project=self.PROJECT) + self.assertRaises(TypeError, client.key, KIND, ID, project=self.PROJECT) def test_key_wo_project(self): - kind = 'KIND' + kind = "KIND" id_ = 1234 creds = _make_credentials() client = self._make_one(credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Key', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) with patch as mock_klass: key = client.key(kind, id_) self.assertIs(key, mock_klass.return_value) mock_klass.assert_called_once_with( - kind, id_, project=self.PROJECT, namespace=None) + kind, id_, project=self.PROJECT, namespace=None + ) def test_key_w_namespace(self): - kind = 'KIND' + kind = "KIND" id_ = 1234 namespace = object() creds = _make_credentials() client = self._make_one(namespace=namespace, credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Key', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) with patch as mock_klass: key = client.key(kind, id_) self.assertIs(key, mock_klass.return_value) mock_klass.assert_called_once_with( - kind, id_, project=self.PROJECT, namespace=namespace) + kind, id_, project=self.PROJECT, namespace=namespace + ) def test_key_w_namespace_collision(self): - kind = 'KIND' + kind = "KIND" id_ = 1234 namespace1 = object() namespace2 = object() @@ 
-915,20 +896,19 @@ def test_key_w_namespace_collision(self): creds = _make_credentials() client = self._make_one(namespace=namespace1, credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Key', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) with patch as mock_klass: key = client.key(kind, id_, namespace=namespace2) self.assertIs(key, mock_klass.return_value) mock_klass.assert_called_once_with( - kind, id_, project=self.PROJECT, namespace=namespace2) + kind, id_, project=self.PROJECT, namespace=namespace2 + ) def test_batch(self): creds = _make_credentials() client = self._make_one(credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Batch', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Batch", spec=["__call__"]) with patch as mock_klass: batch = client.batch() self.assertIs(batch, mock_klass.return_value) @@ -939,7 +919,8 @@ def test_transaction_defaults(self): client = self._make_one(credentials=creds) patch = mock.patch( - 'google.cloud.datastore.client.Transaction', spec=['__call__']) + "google.cloud.datastore.client.Transaction", spec=["__call__"] + ) with patch as mock_klass: xact = client.transaction() self.assertIs(xact, mock_klass.return_value) @@ -947,22 +928,19 @@ def test_transaction_defaults(self): def test_read_only_transaction_defaults(self): from google.cloud.datastore_v1.types import TransactionOptions + creds = _make_credentials() client = self._make_one(credentials=creds) xact = client.transaction(read_only=True) self.assertEqual( - xact._options, - TransactionOptions( - read_only=TransactionOptions.ReadOnly() - ) + xact._options, TransactionOptions(read_only=TransactionOptions.ReadOnly()) ) self.assertFalse(xact._options.HasField("read_write")) self.assertTrue(xact._options.HasField("read_only")) - self.assertEqual(xact._options.read_only, - TransactionOptions.ReadOnly()) + self.assertEqual(xact._options.read_only, TransactionOptions.ReadOnly()) def test_query_w_client(self): - KIND = 'KIND' + KIND = "KIND" creds = _make_credentials() client = self._make_one(credentials=creds) @@ -971,40 +949,38 @@ def test_query_w_client(self): self.assertRaises(TypeError, client.query, kind=KIND, client=other) def test_query_w_project(self): - KIND = 'KIND' + KIND = "KIND" creds = _make_credentials() client = self._make_one(credentials=creds) - self.assertRaises(TypeError, - client.query, kind=KIND, project=self.PROJECT) + self.assertRaises(TypeError, client.query, kind=KIND, project=self.PROJECT) def test_query_w_defaults(self): creds = _make_credentials() client = self._make_one(credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Query', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: query = client.query() self.assertIs(query, mock_klass.return_value) mock_klass.assert_called_once_with( - client, project=self.PROJECT, namespace=None) + client, project=self.PROJECT, namespace=None + ) def test_query_explicit(self): - kind = 'KIND' - namespace = 'NAMESPACE' + kind = "KIND" + namespace = "NAMESPACE" ancestor = object() - filters = [('PROPERTY', '==', 'VALUE')] - projection = ['__key__'] - order = ['PROPERTY'] - distinct_on = ['DISTINCT_ON'] + filters = [("PROPERTY", "==", "VALUE")] + projection = ["__key__"] + order = ["PROPERTY"] + distinct_on = ["DISTINCT_ON"] creds = _make_credentials() client = self._make_one(credentials=creds) - patch = mock.patch( - 
'google.cloud.datastore.client.Query', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: query = client.query( kind=kind, @@ -1029,39 +1005,38 @@ def test_query_explicit(self): ) def test_query_w_namespace(self): - kind = 'KIND' + kind = "KIND" namespace = object() creds = _make_credentials() client = self._make_one(namespace=namespace, credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Query', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: query = client.query(kind=kind) self.assertIs(query, mock_klass.return_value) mock_klass.assert_called_once_with( - client, project=self.PROJECT, namespace=namespace, kind=kind) + client, project=self.PROJECT, namespace=namespace, kind=kind + ) def test_query_w_namespace_collision(self): - kind = 'KIND' + kind = "KIND" namespace1 = object() namespace2 = object() creds = _make_credentials() client = self._make_one(namespace=namespace1, credentials=creds) - patch = mock.patch( - 'google.cloud.datastore.client.Query', spec=['__call__']) + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: query = client.query(kind=kind, namespace=namespace2) self.assertIs(query, mock_klass.return_value) mock_klass.assert_called_once_with( - client, project=self.PROJECT, namespace=namespace2, kind=kind) + client, project=self.PROJECT, namespace=namespace2, kind=kind + ) class _NoCommitBatch(object): - def __init__(self, client): from google.cloud.datastore.batch import Batch @@ -1078,8 +1053,7 @@ def __exit__(self, *args): class _NoCommitTransaction(object): - - def __init__(self, client, transaction_id='TRANSACTION'): + def __init__(self, client, transaction_id="TRANSACTION"): from google.cloud.datastore.batch import Batch from google.cloud.datastore.transaction import Transaction @@ -1104,8 +1078,8 @@ class _Entity(dict): class _Key(object): _MARKER = object() - _kind = 'KIND' - _key = 'KEY' + _kind = "KIND" + _key = "KEY" _path = None _id = 1234 _stored = None @@ -1139,13 +1113,11 @@ def completed_key(self, new_id): class _PathElementPB(object): - def __init__(self, id_): self.id = id_ class _KeyPB(object): - def __init__(self, id_): self.path = [_PathElementPB(id_)] @@ -1161,8 +1133,7 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type): # We grab the only mutation. mutated_pb = mutation_pb_list[0] # Then check if it is the correct type. 
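# ---------------------------------------------------------------------------
# [Illustrative aside -- not part of the patch] WhichOneof() names the member
# of a protobuf oneof that is currently set, which is why this helper can
# first type-check a mutation and then fetch it with getattr(). A minimal
# sketch ("demo-project" is an arbitrary value):
from google.cloud.datastore_v1.proto import datastore_pb2

mutation = datastore_pb2.Mutation()
mutation.delete.partition_id.project_id = "demo-project"  # populates 'delete'
assert mutation.WhichOneof("operation") == "delete"
# ---------------------------------------------------------------------------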
- test_case.assertEqual(mutated_pb.WhichOneof('operation'), - mutation_type) + test_case.assertEqual(mutated_pb.WhichOneof("operation"), mutation_type) return getattr(mutated_pb, mutation_type) @@ -1179,31 +1150,29 @@ def _make_key(id_): def _make_commit_response(*keys): from google.cloud.datastore_v1.proto import datastore_pb2 - mutation_results = [ - datastore_pb2.MutationResult(key=key) for key in keys] + mutation_results = [datastore_pb2.MutationResult(key=key) for key in keys] return datastore_pb2.CommitResponse(mutation_results=mutation_results) def _make_lookup_response(results=(), missing=(), deferred=()): entity_results_found = [ - mock.Mock(entity=result, spec=['entity']) for result in results] + mock.Mock(entity=result, spec=["entity"]) for result in results + ] entity_results_missing = [ - mock.Mock(entity=missing_entity, spec=['entity']) - for missing_entity in missing] + mock.Mock(entity=missing_entity, spec=["entity"]) for missing_entity in missing + ] return mock.Mock( found=entity_results_found, missing=entity_results_missing, deferred=deferred, - spec=['found', 'missing', 'deferred']) + spec=["found", "missing", "deferred"], + ) def _make_datastore_api(*keys, **kwargs): - commit_method = mock.Mock( - return_value=_make_commit_response(*keys), spec=[]) - lookup_response = kwargs.pop( - 'lookup_response', _make_lookup_response()) - lookup_method = mock.Mock( - return_value=lookup_response, spec=[]) + commit_method = mock.Mock(return_value=_make_commit_response(*keys), spec=[]) + lookup_response = kwargs.pop("lookup_response", _make_lookup_response()) + lookup_method = mock.Mock(return_value=lookup_response, spec=[]) return mock.Mock( - commit=commit_method, lookup=lookup_method, - spec=['commit', 'lookup']) + commit=commit_method, lookup=lookup_method, spec=["commit", "lookup"] + ) diff --git a/packages/google-cloud-datastore/tests/unit/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py index 37faec861e02..c65541a45854 100644 --- a/packages/google-cloud-datastore/tests/unit/test_entity.py +++ b/packages/google-cloud-datastore/tests/unit/test_entity.py @@ -14,13 +14,12 @@ import unittest -_PROJECT = 'PROJECT' -_KIND = 'KIND' +_PROJECT = "PROJECT" +_KIND = "KIND" _ID = 1234 class TestEntity(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.datastore.entity import Entity @@ -39,18 +38,22 @@ def test_ctor_defaults(self): self.assertEqual(sorted(entity.exclude_from_indexes), []) def test_ctor_explicit(self): - _EXCLUDE_FROM_INDEXES = ['foo', 'bar'] + _EXCLUDE_FROM_INDEXES = ["foo", "bar"] key = _Key() - entity = self._make_one( - key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES) - self.assertEqual(sorted(entity.exclude_from_indexes), - sorted(_EXCLUDE_FROM_INDEXES)) + entity = self._make_one(key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES) + self.assertEqual( + sorted(entity.exclude_from_indexes), sorted(_EXCLUDE_FROM_INDEXES) + ) def test_ctor_bad_exclude_from_indexes(self): BAD_EXCLUDE_FROM_INDEXES = object() key = _Key() - self.assertRaises(TypeError, self._make_one, key=key, - exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES) + self.assertRaises( + TypeError, + self._make_one, + key=key, + exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES, + ) def test___eq_____ne___w_non_entity(self): from google.cloud.datastore.key import Key @@ -75,7 +78,7 @@ def test___eq_____ne___w_different_keys(self): def test___eq_____ne___w_same_keys(self): from google.cloud.datastore.key import Key - name = 'foo' + name = "foo" value = 42 meaning 
= 9 @@ -97,10 +100,10 @@ def test___eq_____ne___w_same_keys_different_props(self): key1 = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key1) - entity1['foo'] = 'Foo' + entity1["foo"] = "Foo" key2 = Key(_KIND, _ID, project=_PROJECT) entity2 = self._make_one(key=key2) - entity1['bar'] = 'Bar' + entity1["bar"] = "Bar" self.assertFalse(entity1 == entity2) self.assertTrue(entity1 != entity2) @@ -110,9 +113,9 @@ def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): key1 = Key(_KIND, _ID, project=_PROJECT) key2 = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key1) - entity1['some_key'] = key1 + entity1["some_key"] = key1 entity2 = self._make_one(key=key1) - entity2['some_key'] = key2 + entity2["some_key"] = key2 self.assertTrue(entity1 == entity2) self.assertFalse(entity1 != entity2) @@ -124,9 +127,9 @@ def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): key1 = Key(_KIND, _ID1, project=_PROJECT) key2 = Key(_KIND, _ID2, project=_PROJECT) entity1 = self._make_one(key=key1) - entity1['some_key'] = key1 + entity1["some_key"] = key1 entity2 = self._make_one(key=key1) - entity2['some_key'] = key2 + entity2["some_key"] = key2 self.assertFalse(entity1 == entity2) self.assertTrue(entity1 != entity2) @@ -136,12 +139,12 @@ def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): key = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key) sub1 = self._make_one() - sub1.update({'foo': 'Foo'}) - entity1['some_entity'] = sub1 + sub1.update({"foo": "Foo"}) + entity1["some_entity"] = sub1 entity2 = self._make_one(key=key) sub2 = self._make_one() - sub2.update({'foo': 'Foo'}) - entity2['some_entity'] = sub2 + sub2.update({"foo": "Foo"}) + entity2["some_entity"] = sub2 self.assertTrue(entity1 == entity2) self.assertFalse(entity1 != entity2) @@ -151,19 +154,19 @@ def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self): key = Key(_KIND, _ID, project=_PROJECT) entity1 = self._make_one(key=key) sub1 = self._make_one() - sub1.update({'foo': 'Foo'}) - entity1['some_entity'] = sub1 + sub1.update({"foo": "Foo"}) + entity1["some_entity"] = sub1 entity2 = self._make_one(key=key) sub2 = self._make_one() - sub2.update({'foo': 'Bar'}) - entity2['some_entity'] = sub2 + sub2.update({"foo": "Bar"}) + entity2["some_entity"] = sub2 self.assertFalse(entity1 == entity2) self.assertTrue(entity1 != entity2) def test__eq__same_value_different_exclude(self): from google.cloud.datastore.key import Key - name = 'foo' + name = "foo" value = 42 key = Key(_KIND, _ID, project=_PROJECT) @@ -178,7 +181,7 @@ def test__eq__same_value_different_exclude(self): def test__eq__same_value_different_meanings(self): from google.cloud.datastore.key import Key - name = 'foo' + name = "foo" value = 42 meaning = 9 key = Key(_KIND, _ID, project=_PROJECT) @@ -197,31 +200,31 @@ def test_id(self): key = Key(_KIND, _ID, project=_PROJECT) entity = self._make_one(key=key) - self.assertEqual(entity.id, _ID) + self.assertEqual(entity.id, _ID) def test_id_none(self): entity = self._make_one(key=None) - self.assertEqual(entity.id, None) + self.assertEqual(entity.id, None) def test___repr___no_key_empty(self): entity = self._make_one() - self.assertEqual(repr(entity), '<Entity {}>') + self.assertEqual(repr(entity), "<Entity {}>") def test___repr___w_key_non_empty(self): key = _Key() - flat_path = ('bar', 12, 'baz', 'himom') + flat_path = ("bar", 12, "baz", "himom") key._flat_path = flat_path entity = self._make_one(key=key) - entity_vals = {'foo': 'Foo'} + entity_vals = {"foo": 
"Foo"} entity.update(entity_vals) - expected = '' % (flat_path, entity_vals) + expected = "" % (flat_path, entity_vals) self.assertEqual(repr(entity), expected) class _Key(object): _MARKER = object() - _key = 'KEY' + _key = "KEY" _partial = False _path = None _id = None diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index f4338812be54..995d9cfa2330 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -16,7 +16,6 @@ class Test__new_value_pb(unittest.TestCase): - def _call_fut(self, entity_pb, name): from google.cloud.datastore.helpers import _new_value_pb @@ -26,7 +25,7 @@ def test_it(self): from google.cloud.datastore_v1.proto import entity_pb2 entity_pb = entity_pb2.Entity() - name = 'foo' + name = "foo" result = self._call_fut(entity_pb, name) self.assertIsInstance(result, entity_pb2.Value) @@ -35,7 +34,6 @@ def test_it(self): class Test__property_tuples(unittest.TestCase): - def _call_fut(self, entity_pb): from google.cloud.datastore.helpers import _property_tuples @@ -47,19 +45,17 @@ def test_it(self): from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() - name1 = 'foo' - name2 = 'bar' + name1 = "foo" + name2 = "bar" val_pb1 = _new_value_pb(entity_pb, name1) val_pb2 = _new_value_pb(entity_pb, name2) result = self._call_fut(entity_pb) self.assertIsInstance(result, types.GeneratorType) - self.assertEqual(sorted(result), - sorted([(name1, val_pb1), (name2, val_pb2)])) + self.assertEqual(sorted(result), sorted([(name1, val_pb1), (name2, val_pb2)])) class Test_entity_from_protobuf(unittest.TestCase): - def _call_fut(self, val): from google.cloud.datastore.helpers import entity_from_protobuf @@ -69,28 +65,28 @@ def test_it(self): from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID = 1234 entity_pb = entity_pb2.Entity() entity_pb.key.partition_id.project_id = _PROJECT entity_pb.key.path.add(kind=_KIND, id=_ID) - value_pb = _new_value_pb(entity_pb, 'foo') - value_pb.string_value = 'Foo' + value_pb = _new_value_pb(entity_pb, "foo") + value_pb.string_value = "Foo" - unindexed_val_pb = _new_value_pb(entity_pb, 'bar') + unindexed_val_pb = _new_value_pb(entity_pb, "bar") unindexed_val_pb.integer_value = 10 unindexed_val_pb.exclude_from_indexes = True - array_val_pb1 = _new_value_pb(entity_pb, 'baz') + array_val_pb1 = _new_value_pb(entity_pb, "baz") array_pb1 = array_val_pb1.array_value.values unindexed_array_val_pb = array_pb1.add() unindexed_array_val_pb.integer_value = 11 unindexed_array_val_pb.exclude_from_indexes = True - array_val_pb2 = _new_value_pb(entity_pb, 'qux') + array_val_pb2 = _new_value_pb(entity_pb, "qux") array_pb2 = array_val_pb2.array_value.values indexed_array_val_pb = array_pb2.add() @@ -98,11 +94,11 @@ def test_it(self): entity = self._call_fut(entity_pb) self.assertEqual(entity.kind, _KIND) - self.assertEqual(entity.exclude_from_indexes, - frozenset(['bar', 'baz'])) + self.assertEqual(entity.exclude_from_indexes, frozenset(["bar", "baz"])) entity_props = dict(entity) - self.assertEqual(entity_props, - {'foo': 'Foo', 'bar': 10, 'baz': [11], 'qux': [12]}) + self.assertEqual( + entity_props, {"foo": "Foo", "bar": 10, "baz": [11], "qux": [12]} + ) # Also check the key. 
key = entity.key @@ -115,14 +111,14 @@ def test_mismatched_value_indexed(self): from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID = 1234 entity_pb = entity_pb2.Entity() entity_pb.key.partition_id.project_id = _PROJECT entity_pb.key.path.add(kind=_KIND, id=_ID) - array_val_pb = _new_value_pb(entity_pb, 'baz') + array_val_pb = _new_value_pb(entity_pb, "baz") array_pb = array_val_pb.array_value.values unindexed_value_pb1 = array_pb.add() @@ -149,10 +145,10 @@ def test_entity_with_meaning(self): from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() - name = 'hello' + name = "hello" value_pb = _new_value_pb(entity_pb, name) value_pb.meaning = meaning = 9 - value_pb.string_value = val = u'something' + value_pb.string_value = val = u"something" entity = self._call_fut(entity_pb) self.assertIsNone(entity.key) @@ -163,10 +159,10 @@ def test_nested_entity_no_key(self): from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb - PROJECT = 'FOO' - KIND = 'KIND' - INSIDE_NAME = 'IFOO' - OUTSIDE_NAME = 'OBAR' + PROJECT = "FOO" + KIND = "KIND" + INSIDE_NAME = "IFOO" + OUTSIDE_NAME = "OBAR" INSIDE_VALUE = 1337 entity_inside = entity_pb2.Entity() @@ -194,28 +190,22 @@ def test_nested_entity_no_key(self): def test_index_mismatch_ignores_empty_list(self): from google.cloud.datastore_v1.proto import entity_pb2 - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID = 1234 - array_val_pb = entity_pb2.Value( - array_value=entity_pb2.ArrayValue(values=[])) + array_val_pb = entity_pb2.Value(array_value=entity_pb2.ArrayValue(values=[])) - entity_pb = entity_pb2.Entity( - properties={ - 'baz': array_val_pb, - }, - ) + entity_pb = entity_pb2.Entity(properties={"baz": array_val_pb}) entity_pb.key.partition_id.project_id = _PROJECT entity_pb.key.path.add(kind=_KIND, id=_ID) entity = self._call_fut(entity_pb) entity_dict = dict(entity) - self.assertEqual(entity_dict['baz'], []) + self.assertEqual(entity_dict["baz"], []) class Test_entity_to_protobuf(unittest.TestCase): - def _call_fut(self, entity): from google.cloud.datastore.helpers import entity_to_protobuf @@ -232,10 +222,9 @@ def _compare_entity_proto(self, entity_pb1, entity_pb2): name1, val1 = pair1 name2, val2 = pair2 self.assertEqual(name1, name2) - if val1.HasField('entity_value'): # Message field (Entity) + if val1.HasField("entity_value"): # Message field (Entity) self.assertEqual(val1.meaning, val2.meaning) - self._compare_entity_proto( - val1.entity_value, val2.entity_value) + self._compare_entity_proto(val1.entity_value, val2.entity_value) else: self.assertEqual(val1, val2) @@ -252,8 +241,8 @@ def test_key_only(self): from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key - kind, name = 'PATH', 'NAME' - project = 'PROJECT' + kind, name = "PATH", "NAME" + project = "PROJECT" key = Key(kind, name, project=project) entity = Entity(key=key) entity_pb = self._call_fut(entity) @@ -272,10 +261,10 @@ def test_simple_fields(self): from google.cloud.datastore.helpers import _new_value_pb entity = Entity() - name1 = 'foo' + name1 = "foo" entity[name1] = value1 = 42 - name2 = 'bar' - entity[name2] = value2 = u'some-string' + name2 = "bar" + entity[name2] = value2 = u"some-string" entity_pb = self._call_fut(entity) expected_pb = entity_pb2.Entity() @@ -291,11 +280,11 @@ 
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
-        entity['foo'] = []
+        entity["foo"] = []
         entity_pb = self._call_fut(entity)
 
         expected_pb = entity_pb2.Entity()
-        prop = expected_pb.properties.get_or_create('foo')
+        prop = expected_pb.properties.get_or_create("foo")
         prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[]))
 
         self._compare_entity_proto(entity_pb, expected_pb)
@@ -307,42 +296,42 @@ def test_inverts_to_protobuf(self):
         original_pb = entity_pb2.Entity()
         # Add a key.
-        original_pb.key.partition_id.project_id = project = 'PROJECT'
+        original_pb.key.partition_id.project_id = project = "PROJECT"
         elem1 = original_pb.key.path.add()
-        elem1.kind = 'Family'
+        elem1.kind = "Family"
         elem1.id = 1234
         elem2 = original_pb.key.path.add()
-        elem2.kind = 'King'
-        elem2.name = 'Spades'
+        elem2.kind = "King"
+        elem2.name = "Spades"
 
         # Add an integer property.
-        val_pb1 = _new_value_pb(original_pb, 'foo')
+        val_pb1 = _new_value_pb(original_pb, "foo")
         val_pb1.integer_value = 1337
         val_pb1.exclude_from_indexes = True
 
         # Add a string property.
-        val_pb2 = _new_value_pb(original_pb, 'bar')
-        val_pb2.string_value = u'hello'
+        val_pb2 = _new_value_pb(original_pb, "bar")
+        val_pb2.string_value = u"hello"
 
         # Add a nested (entity) property.
-        val_pb3 = _new_value_pb(original_pb, 'entity-baz')
+        val_pb3 = _new_value_pb(original_pb, "entity-baz")
         sub_pb = entity_pb2.Entity()
-        sub_val_pb1 = _new_value_pb(sub_pb, 'x')
+        sub_val_pb1 = _new_value_pb(sub_pb, "x")
         sub_val_pb1.double_value = 3.14
-        sub_val_pb2 = _new_value_pb(sub_pb, 'y')
+        sub_val_pb2 = _new_value_pb(sub_pb, "y")
         sub_val_pb2.double_value = 2.718281828
         val_pb3.meaning = 9
         val_pb3.entity_value.CopyFrom(sub_pb)
 
         # Add a list property.
-        val_pb4 = _new_value_pb(original_pb, 'list-quux')
+        val_pb4 = _new_value_pb(original_pb, "list-quux")
         array_val1 = val_pb4.array_value.values.add()
         array_val1.exclude_from_indexes = False
         array_val1.meaning = meaning = 22
-        array_val1.blob_value = b'\xe2\x98\x83'
+        array_val1.blob_value = b"\xe2\x98\x83"
         array_val2 = val_pb4.array_value.values.add()
         array_val2.exclude_from_indexes = False
         array_val2.meaning = meaning
-        array_val2.blob_value = b'\xe2\x98\x85'
+        array_val2.blob_value = b"\xe2\x98\x85"
 
         # Convert to the user-space Entity.
         entity = entity_from_protobuf(original_pb)
@@ -359,7 +348,7 @@ def test_meaning_with_change(self):
         from google.cloud.datastore.helpers import _new_value_pb
 
         entity = Entity()
-        name = 'foo'
+        name = "foo"
         entity[name] = value = 42
         entity._meanings[name] = (9, 1337)
         entity_pb = self._call_fut(entity)
@@ -377,7 +366,7 @@ def test_variable_meanings(self):
         from google.cloud.datastore.helpers import _new_value_pb
 
         entity = Entity()
-        name = 'quux'
+        name = "quux"
         entity[name] = values = [1, 20, 300]
         meaning = 9
         entity._meanings[name] = ([None, meaning, None], values)
@@ -402,21 +391,17 @@ def test_dict_to_entity(self):
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
-        entity['a'] = {'b': u'c'}
+        entity["a"] = {"b": u"c"}
         entity_pb = self._call_fut(entity)
 
         expected_pb = entity_pb2.Entity(
             properties={
-                'a': entity_pb2.Value(
+                "a": entity_pb2.Value(
                     entity_value=entity_pb2.Entity(
-                        properties={
-                            'b': entity_pb2.Value(
-                                string_value='c',
-                            ),
-                        },
-                    ),
-                ),
-            },
+                        properties={"b": entity_pb2.Value(string_value="c")}
+                    )
+                )
+            }
         )
 
         self.assertEqual(entity_pb, expected_pb)
@@ -425,52 +410,35 @@ def test_dict_to_entity_recursive(self):
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
-        entity['a'] = {
-            'b': {
-                'c': {
-                    'd': 1.25,
-                },
-                'e': True,
-            },
-            'f': 10,
-        }
+        entity["a"] = {"b": {"c": {"d": 1.25}, "e": True}, "f": 10}
         entity_pb = self._call_fut(entity)
 
         b_entity_pb = entity_pb2.Entity(
             properties={
-                'c': entity_pb2.Value(
+                "c": entity_pb2.Value(
                     entity_value=entity_pb2.Entity(
-                        properties={
-                            'd': entity_pb2.Value(
-                                double_value=1.25,
-                            ),
-                        },
-                    ),
+                        properties={"d": entity_pb2.Value(double_value=1.25)}
+                    )
                 ),
-                'e': entity_pb2.Value(boolean_value=True),
+                "e": entity_pb2.Value(boolean_value=True),
             }
         )
         expected_pb = entity_pb2.Entity(
             properties={
-                'a': entity_pb2.Value(
+                "a": entity_pb2.Value(
                     entity_value=entity_pb2.Entity(
                         properties={
-                            'b': entity_pb2.Value(
-                                entity_value=b_entity_pb,
-                            ),
-                            'f': entity_pb2.Value(
-                                integer_value=10,
-                            ),
-                        },
-                    ),
-                ),
-            },
+                            "b": entity_pb2.Value(entity_value=b_entity_pb),
+                            "f": entity_pb2.Value(integer_value=10),
+                        }
+                    )
+                )
+            }
         )
 
         self.assertEqual(entity_pb, expected_pb)
 
 
 class Test_key_from_protobuf(unittest.TestCase):
-
     def _call_fut(self, val):
         from google.cloud.datastore.helpers import key_from_protobuf
 
@@ -486,36 +454,37 @@ def _makePB(self, project=None, namespace=None, path=()):
             pb.partition_id.namespace_id = namespace
         for elem in path:
             added = pb.path.add()
-            added.kind = elem['kind']
-            if 'id' in elem:
-                added.id = elem['id']
-            if 'name' in elem:
-                added.name = elem['name']
+            added.kind = elem["kind"]
+            if "id" in elem:
+                added.id = elem["id"]
+            if "name" in elem:
+                added.name = elem["name"]
         return pb
 
     def test_wo_namespace_in_pb(self):
-        _PROJECT = 'PROJECT'
-        pb = self._makePB(path=[{'kind': 'KIND'}], project=_PROJECT)
+        _PROJECT = "PROJECT"
+        pb = self._makePB(path=[{"kind": "KIND"}], project=_PROJECT)
         key = self._call_fut(pb)
         self.assertEqual(key.project, _PROJECT)
         self.assertIsNone(key.namespace)
 
     def test_w_namespace_in_pb(self):
-        _PROJECT = 'PROJECT'
-        _NAMESPACE = 'NAMESPACE'
-        pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE,
-                          project=_PROJECT)
+        _PROJECT = "PROJECT"
+        _NAMESPACE = "NAMESPACE"
+        pb = self._makePB(
+            path=[{"kind": "KIND"}], namespace=_NAMESPACE, project=_PROJECT
+        )
         key = self._call_fut(pb)
         self.assertEqual(key.project, _PROJECT)
         self.assertEqual(key.namespace, _NAMESPACE)
 
     def test_w_nested_path_in_pb(self):
         _PATH = [
-            {'kind': 'PARENT', 'name': 'NAME'},
-            {'kind': 'CHILD', 'id': 1234},
-            {'kind': 'GRANDCHILD', 'id': 5678},
+            {"kind": "PARENT", "name": "NAME"},
+            {"kind": "CHILD", "id": 1234},
+            {"kind": "GRANDCHILD", "id": 5678},
         ]
-        pb = self._makePB(path=_PATH, project='PROJECT')
+        pb = self._makePB(path=_PATH, project="PROJECT")
         key = self._call_fut(pb)
         self.assertEqual(key.path, _PATH)
 
@@ -525,7 +494,6 @@ def test_w_nothing_in_pb(self):
 
 
 class Test__get_read_options(unittest.TestCase):
-
     def _call_fut(self, eventual, transaction_id):
         from google.cloud.datastore.helpers import get_read_options
 
@@ -533,20 +501,21 @@ def _call_fut(self, eventual, transaction_id):
 
     def test_eventual_w_transaction(self):
         with self.assertRaises(ValueError):
-            self._call_fut(True, b'123')
+            self._call_fut(True, b"123")
 
     def test_eventual_wo_transaction(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
         read_options = self._call_fut(True, None)
         expected = datastore_pb2.ReadOptions(
-            read_consistency=datastore_pb2.ReadOptions.EVENTUAL)
+            read_consistency=datastore_pb2.ReadOptions.EVENTUAL
+        )
         self.assertEqual(read_options, expected)
 
     def test_default_w_transaction(self):
         from google.cloud.datastore_v1.proto import datastore_pb2
 
-        txn_id = b'123abc-easy-as'
+        txn_id = b"123abc-easy-as"
         read_options = self._call_fut(False, txn_id)
         expected = datastore_pb2.ReadOptions(transaction=txn_id)
         self.assertEqual(read_options, expected)
@@ -560,7 +529,6 @@ def test_default_wo_transaction(self):
 
 
 class Test__pb_attr_value(unittest.TestCase):
-
     def _call_fut(self, val):
         from google.cloud.datastore.helpers import _pb_attr_value
 
@@ -575,7 +543,7 @@ def test_datetime_naive(self):
         naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros)  # No zone.
         utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
         name, value = self._call_fut(naive)
-        self.assertEqual(name, 'timestamp_value')
+        self.assertEqual(name, "timestamp_value")
         self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
         self.assertEqual(value.nanos, 1000 * micros)
 
@@ -587,73 +555,73 @@ def test_datetime_w_zone(self):
         micros = 4375
         utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
         name, value = self._call_fut(utc)
-        self.assertEqual(name, 'timestamp_value')
+        self.assertEqual(name, "timestamp_value")
         self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
         self.assertEqual(value.nanos, 1000 * micros)
 
     def test_key(self):
         from google.cloud.datastore.key import Key
 
-        key = Key('PATH', 1234, project='PROJECT')
+        key = Key("PATH", 1234, project="PROJECT")
         name, value = self._call_fut(key)
-        self.assertEqual(name, 'key_value')
+        self.assertEqual(name, "key_value")
         self.assertEqual(value, key.to_protobuf())
 
     def test_bool(self):
         name, value = self._call_fut(False)
-        self.assertEqual(name, 'boolean_value')
+        self.assertEqual(name, "boolean_value")
         self.assertEqual(value, False)
 
     def test_float(self):
         name, value = self._call_fut(3.1415926)
-        self.assertEqual(name, 'double_value')
+        self.assertEqual(name, "double_value")
         self.assertEqual(value, 3.1415926)
 
     def test_int(self):
         name, value = self._call_fut(42)
-        self.assertEqual(name, 'integer_value')
+        self.assertEqual(name, "integer_value")
         self.assertEqual(value, 42)
 
     def test_long(self):
         must_be_long = (1 << 63) - 1
         name, value = self._call_fut(must_be_long)
-        self.assertEqual(name, 'integer_value')
+        self.assertEqual(name, "integer_value")
         self.assertEqual(value, must_be_long)
 
     def test_native_str(self):
         import six
 
-        name, value = self._call_fut('str')
+        name, value = self._call_fut("str")
         if six.PY2:
-            self.assertEqual(name, 'blob_value')
+            self.assertEqual(name, "blob_value")
         else:  # pragma: NO COVER Python 3
-            self.assertEqual(name, 'string_value')
-        self.assertEqual(value, 'str')
+            self.assertEqual(name, "string_value")
+        self.assertEqual(value, "str")
 
     def test_bytes(self):
-        name, value = self._call_fut(b'bytes')
-        self.assertEqual(name, 'blob_value')
-        self.assertEqual(value, b'bytes')
+        name, value = self._call_fut(b"bytes")
+        self.assertEqual(name, "blob_value")
+        self.assertEqual(value, b"bytes")
 
     def test_unicode(self):
-        name, value = self._call_fut(u'str')
-        self.assertEqual(name, 'string_value')
-        self.assertEqual(value, u'str')
+        name, value = self._call_fut(u"str")
+        self.assertEqual(name, "string_value")
+        self.assertEqual(value, u"str")
 
     def test_entity(self):
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
         name, value = self._call_fut(entity)
-        self.assertEqual(name, 'entity_value')
+        self.assertEqual(name, "entity_value")
         self.assertIs(value, entity)
 
     def test_dict(self):
         from google.cloud.datastore.entity import Entity
 
-        orig_value = {'richard': b'feynman'}
+        orig_value = {"richard": b"feynman"}
         name, value = self._call_fut(orig_value)
-        self.assertEqual(name, 'entity_value')
+        self.assertEqual(name, "entity_value")
         self.assertIsInstance(value, Entity)
         self.assertIsNone(value.key)
         self.assertEqual(value._meanings, {})
@@ -661,9 +629,9 @@ def test_dict(self):
         self.assertEqual(dict(value), orig_value)
 
     def test_array(self):
-        values = ['a', 0, 3.14]
+        values = ["a", 0, 3.14]
         name, value = self._call_fut(values)
-        self.assertEqual(name, 'array_value')
+        self.assertEqual(name, "array_value")
         self.assertIs(value, values)
 
     def test_geo_point(self):
@@ -675,14 +643,14 @@ def test_geo_point(self):
         geo_pt = GeoPoint(latitude=lat, longitude=lng)
         geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
         name, value = self._call_fut(geo_pt)
-        self.assertEqual(name, 'geo_point_value')
+        self.assertEqual(name, "geo_point_value")
         self.assertEqual(value, geo_pt_pb)
 
     def test_null(self):
         from google.protobuf import struct_pb2
 
         name, value = self._call_fut(None)
-        self.assertEqual(name, 'null_value')
+        self.assertEqual(name, "null_value")
         self.assertEqual(value, struct_pb2.NULL_VALUE)
 
     def test_object(self):
@@ -690,7 +658,6 @@
 
 
 class Test__get_value_from_value_pb(unittest.TestCase):
-
     def _call_fut(self, pb):
         from google.cloud.datastore.helpers import _get_value_from_value_pb
 
@@ -721,30 +688,30 @@ def test_key(self):
         from google.cloud.datastore.key import Key
 
         pb = entity_pb2.Value()
-        expected = Key('KIND', 1234, project='PROJECT').to_protobuf()
+        expected = Key("KIND", 1234, project="PROJECT").to_protobuf()
         pb.key_value.CopyFrom(expected)
         found = self._call_fut(pb)
         self.assertEqual(found.to_protobuf(), expected)
 
     def test_bool(self):
-        pb = self._makePB('boolean_value', False)
+        pb = self._makePB("boolean_value", False)
         self.assertEqual(self._call_fut(pb), False)
 
     def test_float(self):
-        pb = self._makePB('double_value', 3.1415926)
+        pb = self._makePB("double_value", 3.1415926)
         self.assertEqual(self._call_fut(pb), 3.1415926)
 
     def test_int(self):
-        pb = self._makePB('integer_value', 42)
+        pb = self._makePB("integer_value", 42)
         self.assertEqual(self._call_fut(pb), 42)
 
     def test_bytes(self):
-        pb = self._makePB('blob_value', b'str')
-        self.assertEqual(self._call_fut(pb), b'str')
+        pb = self._makePB("blob_value", b"str")
+        self.assertEqual(self._call_fut(pb), b"str")
 
     def test_unicode(self):
-        pb = self._makePB('string_value', u'str')
-        self.assertEqual(self._call_fut(pb), u'str')
+        pb = self._makePB("string_value", u"str")
+        self.assertEqual(self._call_fut(pb), u"str")
 
     def test_entity(self):
         from google.cloud.datastore_v1.proto import entity_pb2
@@ -753,14 +720,14 @@ def test_entity(self):
 
         pb = entity_pb2.Value()
         entity_pb = pb.entity_value
-        entity_pb.key.path.add(kind='KIND')
-        entity_pb.key.partition_id.project_id = 'PROJECT'
+        entity_pb.key.path.add(kind="KIND")
+        entity_pb.key.partition_id.project_id = "PROJECT"
 
-        value_pb = _new_value_pb(entity_pb, 'foo')
-        value_pb.string_value = 'Foo'
+        value_pb = _new_value_pb(entity_pb, "foo")
+        value_pb.string_value = "Foo"
 
         entity = self._call_fut(pb)
         self.assertIsInstance(entity, Entity)
-        self.assertEqual(entity['foo'], 'Foo')
+        self.assertEqual(entity["foo"], "Foo")
 
     def test_array(self):
         from google.cloud.datastore_v1.proto import entity_pb2
@@ -768,11 +735,11 @@ def test_array(self):
         pb = entity_pb2.Value()
         array_pb = pb.array_value.values
         item_pb = array_pb.add()
-        item_pb.string_value = 'Foo'
+        item_pb.string_value = "Foo"
         item_pb = array_pb.add()
-        item_pb.string_value = 'Bar'
+        item_pb.string_value = "Bar"
         items = self._call_fut(pb)
-        self.assertEqual(items, ['Foo', 'Bar'])
+        self.assertEqual(items, ["Foo", "Bar"])
 
     def test_geo_point(self):
         from google.type import latlng_pb2
@@ -805,7 +772,6 @@ def test_unknown(self):
 
 
 class Test_set_protobuf_value(unittest.TestCase):
-
     def _call_fut(self, value_pb, val):
         from google.cloud.datastore.helpers import _set_protobuf_value
 
@@ -813,6 +779,7 @@ def _call_fut(self, value_pb, val):
 
     def _makePB(self):
         from google.cloud.datastore_v1.proto import entity_pb2
+
         return entity_pb2.Value()
 
     def test_datetime(self):
@@ -832,7 +799,7 @@ def test_key(self):
         from google.cloud.datastore.key import Key
 
         pb = self._makePB()
-        key = Key('KIND', 1234, project='PROJECT')
+        key = Key("KIND", 1234, project="PROJECT")
         self._call_fut(pb, key)
         value = pb.key_value
         self.assertEqual(value, key.to_protobuf())
@@ -840,7 +807,7 @@ def test_none(self):
         pb = self._makePB()
         self._call_fut(pb, None)
-        self.assertEqual(pb.WhichOneof('value_type'), 'null_value')
+        self.assertEqual(pb.WhichOneof("value_type"), "null_value")
 
     def test_bool(self):
         pb = self._makePB()
@@ -871,24 +838,24 @@ def test_native_str(self):
         import six
 
         pb = self._makePB()
-        self._call_fut(pb, 'str')
+        self._call_fut(pb, "str")
         if six.PY2:
             value = pb.blob_value
         else:  # pragma: NO COVER Python 3
             value = pb.string_value
-        self.assertEqual(value, 'str')
+        self.assertEqual(value, "str")
 
     def test_bytes(self):
         pb = self._makePB()
-        self._call_fut(pb, b'str')
+        self._call_fut(pb, b"str")
         value = pb.blob_value
-        self.assertEqual(value, b'str')
+        self.assertEqual(value, b"str")
 
     def test_unicode(self):
         pb = self._makePB()
-        self._call_fut(pb, u'str')
+        self._call_fut(pb, u"str")
         value = pb.string_value
-        self.assertEqual(value, u'str')
+        self.assertEqual(value, u"str")
 
     def test_entity_empty_wo_key(self):
         from google.cloud.datastore.entity import Entity
@@ -898,7 +865,7 @@ def test_entity_empty_wo_key(self):
         entity = Entity()
         self._call_fut(pb, entity)
         value = pb.entity_value
-        self.assertEqual(value.key.SerializeToString(), b'')
+        self.assertEqual(value.key.SerializeToString(), b"")
         self.assertEqual(len(list(_property_tuples(value))), 0)
 
     def test_entity_w_key(self):
@@ -906,10 +873,10 @@
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _property_tuples
         from google.cloud.datastore.key import Key
 
-        name = 'foo'
-        value = u'Foo'
+        name = "foo"
+        value = u"Foo"
         pb = self._makePB()
-        key = Key('KIND', 123, project='PROJECT')
+        key = Key("KIND", 123, project="PROJECT")
         entity = Entity(key=key)
         entity[name] = value
         self._call_fut(pb, entity)
@@ -923,7 +890,7 @@ def test_array(self):
         pb = self._makePB()
-        values = [u'a', 0, 3.14]
+        values = [u"a", 0, 3.14]
         self._call_fut(pb, values)
         marshalled = pb.array_value.values
         self.assertEqual(len(marshalled), len(values))
@@ -945,7 +912,6 @@ def test_geo_point(self):
 
 
 class Test__get_meaning(unittest.TestCase):
-
     def _call_fut(self, *args, **kwargs):
        from google.cloud.datastore.helpers import _get_meaning
 
@@ -963,7 +929,7 @@ def test_single(self):
         value_pb = entity_pb2.Value()
         value_pb.meaning = meaning = 22
-        value_pb.string_value = u'hi'
+        value_pb.string_value = u"hi"
 
         result = self._call_fut(value_pb)
         self.assertEqual(meaning, result)
@@ -986,8 +952,8 @@ def test_array_value(self):
         sub_value_pb2 = value_pb.array_value.values.add()
 
         sub_value_pb1.meaning = sub_value_pb2.meaning = meaning
-        sub_value_pb1.string_value = u'hi'
-        sub_value_pb2.string_value = u'bye'
+        sub_value_pb1.string_value = u"hi"
+        sub_value_pb2.string_value = u"bye"
 
         result = self._call_fut(value_pb, is_list=True)
         self.assertEqual(meaning, result)
@@ -1003,8 +969,8 @@ def test_array_value_multiple_meanings(self):
         sub_value_pb1.meaning = meaning1
         sub_value_pb2.meaning = meaning2
-        sub_value_pb1.string_value = u'hi'
-        sub_value_pb2.string_value = u'bye'
+        sub_value_pb1.string_value = u"hi"
+        sub_value_pb2.string_value = u"bye"
 
         result = self._call_fut(value_pb, is_list=True)
         self.assertEqual(result, [meaning1, meaning2])
@@ -1018,15 +984,14 @@ def test_array_value_meaning_partially_unset(self):
         sub_value_pb2 = value_pb.array_value.values.add()
 
         sub_value_pb1.meaning = meaning1
-        sub_value_pb1.string_value = u'hi'
-        sub_value_pb2.string_value = u'bye'
+        sub_value_pb1.string_value = u"hi"
+        sub_value_pb2.string_value = u"bye"
 
         result = self._call_fut(value_pb, is_list=True)
         self.assertEqual(result, [meaning1, None])
 
 
 class TestGeoPoint(unittest.TestCase):
-
     @staticmethod
     def _get_target_class():
         from google.cloud.datastore.helpers import GeoPoint
@@ -1072,7 +1037,7 @@ def test___ne__same_value(self):
         lat = 0.1
         lng = 20.03
         geo_pt1 = self._make_one(lat, lng)
         geo_pt2 = self._make_one(lat, lng)
-        comparison_val = (geo_pt1 != geo_pt2)
+        comparison_val = geo_pt1 != geo_pt2
         self.assertFalse(comparison_val)
 
     def test___ne__(self):
diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py
index db66be08e065..a157d50b576a 100644
--- a/packages/google-cloud-datastore/tests/unit/test_key.py
+++ b/packages/google-cloud-datastore/tests/unit/test_key.py
@@ -17,7 +17,7 @@
 
 
 class TestKey(unittest.TestCase):
-    _DEFAULT_PROJECT = 'PROJECT'
+    _DEFAULT_PROJECT = "PROJECT"
     # NOTE: This comes directly from a running (in the dev appserver)
     #       App Engine app. Created via:
     #
@@ -27,17 +27,17 @@ class TestKey(unittest.TestCase):
     #           namespace='space', app='s~sample-app')
     #           urlsafe = key.urlsafe()
     _URLSAFE_EXAMPLE1 = (
-        b'agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ'
-        b'WF0aGVyDKIBBXNwYWNl')
-    _URLSAFE_APP1 = 's~sample-app'
-    _URLSAFE_NAMESPACE1 = 'space'
-    _URLSAFE_FLAT_PATH1 = ('Parent', 59, 'Child', 'Feather')
-    _URLSAFE_EXAMPLE2 = b'agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA'
-    _URLSAFE_APP2 = 's~fire'
-    _URLSAFE_FLAT_PATH2 = ('Kind', 'Thing')
-    _URLSAFE_EXAMPLE3 = b'ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw'
-    _URLSAFE_APP3 = 'sample-app-no-location'
-    _URLSAFE_FLAT_PATH3 = ('Zorp', 88)
+        b"agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ" b"WF0aGVyDKIBBXNwYWNl"
+    )
+    _URLSAFE_APP1 = "s~sample-app"
+    _URLSAFE_NAMESPACE1 = "space"
+    _URLSAFE_FLAT_PATH1 = ("Parent", 59, "Child", "Feather")
+    _URLSAFE_EXAMPLE2 = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA"
+    _URLSAFE_APP2 = "s~fire"
+    _URLSAFE_FLAT_PATH2 = ("Kind", "Thing")
+    _URLSAFE_EXAMPLE3 = b"ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw"
+    _URLSAFE_APP3 = "sample-app-no-location"
+    _URLSAFE_FLAT_PATH3 = ("Zorp", 88)
 
     @staticmethod
     def _get_target_class():
@@ -53,26 +53,29 @@ def test_ctor_empty(self):
 
     def test_ctor_no_project(self):
         klass = self._get_target_class()
-        self.assertRaises(ValueError, klass, 'KIND')
+        self.assertRaises(ValueError, klass, "KIND")
 
     def test_ctor_w_explicit_project_empty_path(self):
-        _PROJECT = 'PROJECT'
+        _PROJECT = "PROJECT"
         self.assertRaises(ValueError, self._make_one, project=_PROJECT)
 
     def test_ctor_parent(self):
-        _PARENT_KIND = 'KIND1'
+        _PARENT_KIND = "KIND1"
         _PARENT_ID = 1234
-        _PARENT_PROJECT = 'PROJECT-ALT'
-        _PARENT_NAMESPACE = 'NAMESPACE'
-        _CHILD_KIND = 'KIND2'
+        _PARENT_PROJECT = "PROJECT-ALT"
+        _PARENT_NAMESPACE = "NAMESPACE"
+        _CHILD_KIND = "KIND2"
         _CHILD_ID = 2345
         _PATH = [
-            {'kind': _PARENT_KIND, 'id': _PARENT_ID},
-            {'kind': _CHILD_KIND, 'id': _CHILD_ID},
+            {"kind": _PARENT_KIND, "id": _PARENT_ID},
+            {"kind": _CHILD_KIND, "id": _CHILD_ID},
         ]
-        parent_key = self._make_one(_PARENT_KIND, _PARENT_ID,
-                                    project=_PARENT_PROJECT,
-                                    namespace=_PARENT_NAMESPACE)
+        parent_key = self._make_one(
+            _PARENT_KIND,
+            _PARENT_ID,
+            project=_PARENT_PROJECT,
+            namespace=_PARENT_NAMESPACE,
+        )
         key = self._make_one(_CHILD_KIND, _CHILD_ID, parent=parent_key)
         self.assertEqual(key.project, parent_key.project)
         self.assertEqual(key.namespace, parent_key.namespace)
@@ -81,67 +84,80 @@ def test_ctor_parent(self):
         self.assertIs(key.parent, parent_key)
 
     def test_ctor_partial_parent(self):
-        parent_key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        parent_key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         with self.assertRaises(ValueError):
-            self._make_one('KIND2', 1234, parent=parent_key)
+            self._make_one("KIND2", 1234, parent=parent_key)
 
     def test_ctor_parent_bad_type(self):
         with self.assertRaises(AttributeError):
-            self._make_one('KIND2', 1234, parent=('KIND1', 1234),
-                           project=self._DEFAULT_PROJECT)
+            self._make_one(
+                "KIND2", 1234, parent=("KIND1", 1234), project=self._DEFAULT_PROJECT
+            )
 
     def test_ctor_parent_bad_namespace(self):
-        parent_key = self._make_one('KIND', 1234, namespace='FOO',
-                                    project=self._DEFAULT_PROJECT)
+        parent_key = self._make_one(
+            "KIND", 1234, namespace="FOO", project=self._DEFAULT_PROJECT
+        )
         with self.assertRaises(ValueError):
-            self._make_one('KIND2', 1234, namespace='BAR', parent=parent_key,
-                           PROJECT=self._DEFAULT_PROJECT)
+            self._make_one(
+                "KIND2",
+                1234,
+                namespace="BAR",
+                parent=parent_key,
+                PROJECT=self._DEFAULT_PROJECT,
+            )
 
     def test_ctor_parent_bad_project(self):
-        parent_key = self._make_one('KIND', 1234, project='FOO')
+        parent_key = self._make_one("KIND", 1234, project="FOO")
         with self.assertRaises(ValueError):
-            self._make_one('KIND2', 1234, parent=parent_key,
-                           project='BAR')
+            self._make_one("KIND2", 1234, parent=parent_key, project="BAR")
 
     def test_ctor_parent_empty_path(self):
-        parent_key = self._make_one('KIND', 1234,
-                                    project=self._DEFAULT_PROJECT)
+        parent_key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
         with self.assertRaises(ValueError):
             self._make_one(parent=parent_key)
 
     def test_ctor_explicit(self):
-        _PROJECT = 'PROJECT-ALT'
-        _NAMESPACE = 'NAMESPACE'
-        _KIND = 'KIND'
+        _PROJECT = "PROJECT-ALT"
+        _NAMESPACE = "NAMESPACE"
+        _KIND = "KIND"
         _ID = 1234
-        _PATH = [{'kind': _KIND, 'id': _ID}]
-        key = self._make_one(_KIND, _ID, namespace=_NAMESPACE,
-                             project=_PROJECT)
+        _PATH = [{"kind": _KIND, "id": _ID}]
+        key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
         self.assertEqual(key.project, _PROJECT)
         self.assertEqual(key.namespace, _NAMESPACE)
         self.assertEqual(key.kind, _KIND)
         self.assertEqual(key.path, _PATH)
 
     def test_ctor_bad_kind(self):
-        self.assertRaises(ValueError, self._make_one, object(),
-                          project=self._DEFAULT_PROJECT)
+        self.assertRaises(
+            ValueError, self._make_one, object(), project=self._DEFAULT_PROJECT
+        )
 
     def test_ctor_bad_id_or_name(self):
-        self.assertRaises(ValueError, self._make_one, 'KIND', object(),
-                          project=self._DEFAULT_PROJECT)
-        self.assertRaises(ValueError, self._make_one, 'KIND', None,
-                          project=self._DEFAULT_PROJECT)
-        self.assertRaises(ValueError, self._make_one, 'KIND', 10, 'KIND2',
-                          None, project=self._DEFAULT_PROJECT)
+        self.assertRaises(
+            ValueError, self._make_one, "KIND", object(), project=self._DEFAULT_PROJECT
+        )
+        self.assertRaises(
+            ValueError, self._make_one, "KIND", None, project=self._DEFAULT_PROJECT
+        )
+        self.assertRaises(
+            ValueError,
+            self._make_one,
+            "KIND",
+            10,
+            "KIND2",
+            None,
+            project=self._DEFAULT_PROJECT,
+        )
 
     def test__clone(self):
-        _PROJECT = 'PROJECT-ALT'
-        _NAMESPACE = 'NAMESPACE'
-        _KIND = 'KIND'
+        _PROJECT = "PROJECT-ALT"
+        _NAMESPACE = "NAMESPACE"
+        _KIND = "KIND"
         _ID = 1234
-        _PATH = [{'kind': _KIND, 'id': _ID}]
-        key = self._make_one(_KIND, _ID, namespace=_NAMESPACE,
-                             project=_PROJECT)
+        _PATH = [{"kind": _KIND, "id": _ID}]
+        key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
         clone = key._clone()
         self.assertEqual(clone.project, _PROJECT)
         self.assertEqual(clone.namespace, _NAMESPACE)
@@ -149,16 +165,15 @@ def test__clone(self):
         self.assertEqual(clone.path, _PATH)
 
     def test__clone_with_parent(self):
-        _PROJECT = 'PROJECT-ALT'
-        _NAMESPACE = 'NAMESPACE'
-        _KIND1 = 'PARENT'
-        _KIND2 = 'KIND'
+        _PROJECT = "PROJECT-ALT"
+        _NAMESPACE = "NAMESPACE"
+        _KIND1 = "PARENT"
+        _KIND2 = "KIND"
         _ID1 = 1234
         _ID2 = 2345
-        _PATH = [{'kind': _KIND1, 'id': _ID1}, {'kind': _KIND2, 'id': _ID2}]
+        _PATH = [{"kind": _KIND1, "id": _ID1}, {"kind": _KIND2, "id": _ID2}]
 
-        parent = self._make_one(_KIND1, _ID1, namespace=_NAMESPACE,
-                                project=_PROJECT)
+        parent = self._make_one(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT)
         key = self._make_one(_KIND2, _ID2, parent=parent)
         self.assertIs(key.parent, parent)
         clone = key._clone()
@@ -168,24 +183,24 @@ def test__clone_with_parent(self):
         self.assertEqual(clone.path, _PATH)
 
     def test___eq_____ne___w_non_key(self):
-        _PROJECT = 'PROJECT'
-        _KIND = 'KIND'
-        _NAME = 'one'
+        _PROJECT = "PROJECT"
+        _KIND = "KIND"
+        _NAME = "one"
= "one" key = self._make_one(_KIND, _NAME, project=_PROJECT) self.assertFalse(key == object()) self.assertTrue(key != object()) def test___eq_____ne___two_incomplete_keys_same_kind(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" key1 = self._make_one(_KIND, project=_PROJECT) key2 = self._make_one(_KIND, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID = 1234 key1 = self._make_one(_KIND, project=_PROJECT) key2 = self._make_one(_KIND, _ID, project=_PROJECT) @@ -193,8 +208,8 @@ def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self): self.assertTrue(key1 != key2) def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID = 1234 key1 = self._make_one(_KIND, _ID, project=_PROJECT) key2 = self._make_one(_KIND, project=_PROJECT) @@ -202,8 +217,8 @@ def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self): self.assertTrue(key1 != key2) def test___eq_____ne___same_kind_different_ids(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID1 = 1234 _ID2 = 2345 key1 = self._make_one(_KIND, _ID1, project=_PROJECT) @@ -212,8 +227,8 @@ def test___eq_____ne___same_kind_different_ids(self): self.assertTrue(key1 != key2) def test___eq_____ne___same_kind_and_id(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _KIND = "KIND" _ID = 1234 key1 = self._make_one(_KIND, _ID, project=_PROJECT) key2 = self._make_one(_KIND, _ID, project=_PROJECT) @@ -221,9 +236,9 @@ def test___eq_____ne___same_kind_and_id(self): self.assertFalse(key1 != key2) def test___eq_____ne___same_kind_and_id_different_project(self): - _PROJECT1 = 'PROJECT1' - _PROJECT2 = 'PROJECT2' - _KIND = 'KIND' + _PROJECT1 = "PROJECT1" + _PROJECT2 = "PROJECT2" + _KIND = "KIND" _ID = 1234 key1 = self._make_one(_KIND, _ID, project=_PROJECT1) key2 = self._make_one(_KIND, _ID, project=_PROJECT2) @@ -231,87 +246,82 @@ def test___eq_____ne___same_kind_and_id_different_project(self): self.assertTrue(key1 != key2) def test___eq_____ne___same_kind_and_id_different_namespace(self): - _PROJECT = 'PROJECT' - _NAMESPACE1 = 'NAMESPACE1' - _NAMESPACE2 = 'NAMESPACE2' - _KIND = 'KIND' + _PROJECT = "PROJECT" + _NAMESPACE1 = "NAMESPACE1" + _NAMESPACE2 = "NAMESPACE2" + _KIND = "KIND" _ID = 1234 - key1 = self._make_one(_KIND, _ID, project=_PROJECT, - namespace=_NAMESPACE1) - key2 = self._make_one(_KIND, _ID, project=_PROJECT, - namespace=_NAMESPACE2) + key1 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1) + key2 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) def test___eq_____ne___same_kind_different_names(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' - _NAME1 = 'one' - _NAME2 = 'two' + _PROJECT = "PROJECT" + _KIND = "KIND" + _NAME1 = "one" + _NAME2 = "two" key1 = self._make_one(_KIND, _NAME1, project=_PROJECT) key2 = self._make_one(_KIND, _NAME2, project=_PROJECT) self.assertFalse(key1 == key2) self.assertTrue(key1 != key2) def test___eq_____ne___same_kind_and_name(self): - _PROJECT = 'PROJECT' - _KIND = 'KIND' - _NAME = 'one' + _PROJECT = "PROJECT" + _KIND = "KIND" + _NAME = "one" key1 = self._make_one(_KIND, _NAME, project=_PROJECT) key2 = self._make_one(_KIND, _NAME, 
         self.assertTrue(key1 == key2)
         self.assertFalse(key1 != key2)
 
     def test___eq_____ne___same_kind_and_name_different_project(self):
-        _PROJECT1 = 'PROJECT1'
-        _PROJECT2 = 'PROJECT2'
-        _KIND = 'KIND'
-        _NAME = 'one'
+        _PROJECT1 = "PROJECT1"
+        _PROJECT2 = "PROJECT2"
+        _KIND = "KIND"
+        _NAME = "one"
         key1 = self._make_one(_KIND, _NAME, project=_PROJECT1)
         key2 = self._make_one(_KIND, _NAME, project=_PROJECT2)
         self.assertFalse(key1 == key2)
         self.assertTrue(key1 != key2)
 
     def test___eq_____ne___same_kind_and_name_different_namespace(self):
-        _PROJECT = 'PROJECT'
-        _NAMESPACE1 = 'NAMESPACE1'
-        _NAMESPACE2 = 'NAMESPACE2'
-        _KIND = 'KIND'
-        _NAME = 'one'
-        key1 = self._make_one(_KIND, _NAME, project=_PROJECT,
-                              namespace=_NAMESPACE1)
-        key2 = self._make_one(_KIND, _NAME, project=_PROJECT,
-                              namespace=_NAMESPACE2)
+        _PROJECT = "PROJECT"
+        _NAMESPACE1 = "NAMESPACE1"
+        _NAMESPACE2 = "NAMESPACE2"
+        _KIND = "KIND"
+        _NAME = "one"
+        key1 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1)
+        key2 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2)
         self.assertFalse(key1 == key2)
         self.assertTrue(key1 != key2)
 
     def test___hash___incomplete(self):
-        _PROJECT = 'PROJECT'
-        _KIND = 'KIND'
+        _PROJECT = "PROJECT"
+        _KIND = "KIND"
         key = self._make_one(_KIND, project=_PROJECT)
-        self.assertNotEqual(hash(key),
-                            hash(_KIND) + hash(_PROJECT) + hash(None))
+        self.assertNotEqual(hash(key), hash(_KIND) + hash(_PROJECT) + hash(None))
 
     def test___hash___completed_w_id(self):
-        _PROJECT = 'PROJECT'
-        _KIND = 'KIND'
+        _PROJECT = "PROJECT"
+        _KIND = "KIND"
         _ID = 1234
         key = self._make_one(_KIND, _ID, project=_PROJECT)
-        self.assertNotEqual(hash(key),
-                            hash(_KIND) + hash(_ID) +
-                            hash(_PROJECT) + hash(None))
+        self.assertNotEqual(
+            hash(key), hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None)
+        )
 
     def test___hash___completed_w_name(self):
-        _PROJECT = 'PROJECT'
-        _KIND = 'KIND'
-        _NAME = 'NAME'
+        _PROJECT = "PROJECT"
+        _KIND = "KIND"
+        _NAME = "NAME"
         key = self._make_one(_KIND, _NAME, project=_PROJECT)
-        self.assertNotEqual(hash(key),
-                            hash(_KIND) + hash(_NAME) +
-                            hash(_PROJECT) + hash(None))
+        self.assertNotEqual(
+            hash(key), hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None)
+        )
 
     def test_completed_key_on_partial_w_id(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         _ID = 1234
         new_key = key.completed_key(_ID)
         self.assertIsNot(key, new_key)
@@ -319,25 +329,25 @@ def test_completed_key_on_partial_w_id(self):
         self.assertIsNone(new_key.name)
 
     def test_completed_key_on_partial_w_name(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
-        _NAME = 'NAME'
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
+        _NAME = "NAME"
         new_key = key.completed_key(_NAME)
         self.assertIsNot(key, new_key)
         self.assertIsNone(new_key.id)
         self.assertEqual(new_key.name, _NAME)
 
     def test_completed_key_on_partial_w_invalid(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         self.assertRaises(ValueError, key.completed_key, object())
 
     def test_completed_key_on_complete(self):
-        key = self._make_one('KIND', 1234, project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
         self.assertRaises(ValueError, key.completed_key, 5678)
 
     def test_to_protobuf_defaults(self):
         from google.cloud.datastore_v1.proto import entity_pb2
 
-        _KIND = 'KIND'
+        _KIND = "KIND"
         key = self._make_one(_KIND, project=self._DEFAULT_PROJECT)
         pb = key.to_protobuf()
         self.assertIsInstance(pb, entity_pb2.Key)
@@ -345,36 +355,36 @@ def test_to_protobuf_defaults(self):
         # Check partition ID.
         self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT)
         # Unset values are False-y.
-        self.assertEqual(pb.partition_id.namespace_id, '')
+        self.assertEqual(pb.partition_id.namespace_id, "")
 
         # Check the element PB matches the partial key and kind.
         elem, = list(pb.path)
         self.assertEqual(elem.kind, _KIND)
         # Unset values are False-y.
-        self.assertEqual(elem.name, '')
+        self.assertEqual(elem.name, "")
         # Unset values are False-y.
         self.assertEqual(elem.id, 0)
 
     def test_to_protobuf_w_explicit_project(self):
-        _PROJECT = 'PROJECT-ALT'
-        key = self._make_one('KIND', project=_PROJECT)
+        _PROJECT = "PROJECT-ALT"
+        key = self._make_one("KIND", project=_PROJECT)
         pb = key.to_protobuf()
         self.assertEqual(pb.partition_id.project_id, _PROJECT)
 
     def test_to_protobuf_w_explicit_namespace(self):
-        _NAMESPACE = 'NAMESPACE'
-        key = self._make_one('KIND', namespace=_NAMESPACE,
-                             project=self._DEFAULT_PROJECT)
+        _NAMESPACE = "NAMESPACE"
+        key = self._make_one(
+            "KIND", namespace=_NAMESPACE, project=self._DEFAULT_PROJECT
+        )
         pb = key.to_protobuf()
         self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE)
 
     def test_to_protobuf_w_explicit_path(self):
-        _PARENT = 'PARENT'
-        _CHILD = 'CHILD'
+        _PARENT = "PARENT"
+        _CHILD = "CHILD"
         _ID = 1234
-        _NAME = 'NAME'
-        key = self._make_one(_PARENT, _NAME, _CHILD, _ID,
-                             project=self._DEFAULT_PROJECT)
+        _NAME = "NAME"
+        key = self._make_one(_PARENT, _NAME, _CHILD, _ID, project=self._DEFAULT_PROJECT)
         pb = key.to_protobuf()
         elems = list(pb.path)
         self.assertEqual(len(elems), 2)
@@ -384,27 +394,26 @@ def test_to_protobuf_w_explicit_path(self):
         self.assertEqual(elems[1].id, _ID)
 
     def test_to_protobuf_w_no_kind(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         # Force the 'kind' to be unset. Maybe `to_protobuf` should fail
         # on this? The backend certainly will.
-        key._path[-1].pop('kind')
+        key._path[-1].pop("kind")
         pb = key.to_protobuf()
         # Unset values are False-y.
-        self.assertEqual(pb.path[0].kind, '')
+        self.assertEqual(pb.path[0].kind, "")
 
     def test_to_legacy_urlsafe(self):
         key = self._make_one(
             *self._URLSAFE_FLAT_PATH1,
             project=self._URLSAFE_APP1,
-            namespace=self._URLSAFE_NAMESPACE1)
+            namespace=self._URLSAFE_NAMESPACE1
+        )
         # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
         urlsafe = key.to_legacy_urlsafe()
         self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE1)
 
     def test_to_legacy_urlsafe_strip_padding(self):
-        key = self._make_one(
-            *self._URLSAFE_FLAT_PATH2,
-            project=self._URLSAFE_APP2)
+        key = self._make_one(*self._URLSAFE_FLAT_PATH2, project=self._URLSAFE_APP2)
         # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
         urlsafe = key.to_legacy_urlsafe()
         self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE2)
@@ -412,17 +421,15 @@ def test_to_legacy_urlsafe_strip_padding(self):
         self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0)
 
     def test_to_legacy_urlsafe_with_location_prefix(self):
-        key = self._make_one(
-            *self._URLSAFE_FLAT_PATH3,
-            project=self._URLSAFE_APP3)
-        urlsafe = key.to_legacy_urlsafe(location_prefix='s~')
+        key = self._make_one(*self._URLSAFE_FLAT_PATH3, project=self._URLSAFE_APP3)
+        urlsafe = key.to_legacy_urlsafe(location_prefix="s~")
         self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE3)
 
     def test_from_legacy_urlsafe(self):
         klass = self._get_target_class()
         key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE1)
 
-        self.assertEqual('s~' + key.project, self._URLSAFE_APP1)
+        self.assertEqual("s~" + key.project, self._URLSAFE_APP1)
         self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE1)
         self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH1)
         # Also make sure we didn't accidentally set the parent.
@@ -436,7 +443,7 @@ def test_from_legacy_urlsafe_needs_padding(self):
         self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0)
         key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE2)
 
-        self.assertEqual('s~' + key.project, self._URLSAFE_APP2)
+        self.assertEqual("s~" + key.project, self._URLSAFE_APP2)
         self.assertIsNone(key.namespace)
         self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH2)
 
@@ -450,60 +457,61 @@ def test_from_legacy_urlsafe_with_location_prefix(self):
         self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH3)
 
     def test_is_partial_no_name_or_id(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         self.assertTrue(key.is_partial)
 
     def test_is_partial_w_id(self):
         _ID = 1234
-        key = self._make_one('KIND', _ID, project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
         self.assertFalse(key.is_partial)
 
     def test_is_partial_w_name(self):
-        _NAME = 'NAME'
-        key = self._make_one('KIND', _NAME, project=self._DEFAULT_PROJECT)
+        _NAME = "NAME"
+        key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT)
         self.assertFalse(key.is_partial)
 
     def test_id_or_name_no_name_or_id(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         self.assertIsNone(key.id_or_name)
 
     def test_id_or_name_no_name_or_id_child(self):
-        key = self._make_one('KIND1', 1234, 'KIND2',
-                             project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND1", 1234, "KIND2", project=self._DEFAULT_PROJECT)
        self.assertIsNone(key.id_or_name)
 
     def test_id_or_name_w_id_only(self):
         _ID = 1234
-        key = self._make_one('KIND', _ID, project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
         self.assertEqual(key.id_or_name, _ID)
 
     def test_id_or_name_w_name_only(self):
-        _NAME = 'NAME'
-        key = self._make_one('KIND', _NAME, project=self._DEFAULT_PROJECT)
+        _NAME = "NAME"
+        key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT)
         self.assertEqual(key.id_or_name, _NAME)
 
     def test_parent_default(self):
-        key = self._make_one('KIND', project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
         self.assertIsNone(key.parent)
 
     def test_parent_explicit_top_level(self):
-        key = self._make_one('KIND', 1234, project=self._DEFAULT_PROJECT)
+        key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
         self.assertIsNone(key.parent)
 
     def test_parent_explicit_nested(self):
-        _PARENT_KIND = 'KIND1'
+        _PARENT_KIND = "KIND1"
_PARENT_KIND = "KIND1" _PARENT_ID = 1234 - _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._make_one(_PARENT_KIND, _PARENT_ID, 'KIND2', - project=self._DEFAULT_PROJECT) + _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}] + key = self._make_one( + _PARENT_KIND, _PARENT_ID, "KIND2", project=self._DEFAULT_PROJECT + ) self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): - _PARENT_KIND = 'KIND1' + _PARENT_KIND = "KIND1" _PARENT_ID = 1234 - _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._make_one(_PARENT_KIND, _PARENT_ID, 'KIND2', - project=self._DEFAULT_PROJECT) + _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}] + key = self._make_one( + _PARENT_KIND, _PARENT_ID, "KIND2", project=self._DEFAULT_PROJECT + ) parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent @@ -512,7 +520,7 @@ def test_parent_multiple_calls(self): class Test__clean_app(unittest.TestCase): - PROJECT = 'my-prahjekt' + PROJECT = "my-prahjekt" @staticmethod def _call_fut(app_str): @@ -525,20 +533,19 @@ def test_already_clean(self): self.assertEqual(self._call_fut(app_str), self.PROJECT) def test_standard(self): - app_str = 's~' + self.PROJECT + app_str = "s~" + self.PROJECT self.assertEqual(self._call_fut(app_str), self.PROJECT) def test_european(self): - app_str = 'e~' + self.PROJECT + app_str = "e~" + self.PROJECT self.assertEqual(self._call_fut(app_str), self.PROJECT) def test_dev_server(self): - app_str = 'dev~' + self.PROJECT + app_str = "dev~" + self.PROJECT self.assertEqual(self._call_fut(app_str), self.PROJECT) class Test__get_empty(unittest.TestCase): - @staticmethod def _call_fut(value, empty_value): from google.cloud.datastore.key import _get_empty @@ -546,24 +553,18 @@ def _call_fut(value, empty_value): return _get_empty(value, empty_value) def test_unset(self): - for empty_value in (u'', 0, 0.0, []): + for empty_value in (u"", 0, 0.0, []): ret_val = self._call_fut(empty_value, empty_value) self.assertIsNone(ret_val) def test_actually_set(self): - value_pairs = ( - (u'hello', u''), - (10, 0), - (3.14, 0.0), - (['stuff', 'here'], []), - ) + value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) for value, empty_value in value_pairs: ret_val = self._call_fut(value, empty_value) self.assertIs(ret_val, value) class Test__check_database_id(unittest.TestCase): - @staticmethod def _call_fut(database_id): from google.cloud.datastore.key import _check_database_id @@ -571,17 +572,16 @@ def _call_fut(database_id): return _check_database_id(database_id) def test_empty_value(self): - ret_val = self._call_fut(u'') + ret_val = self._call_fut(u"") # Really we are just happy there was no exception. 
         self.assertIsNone(ret_val)
 
     def test_failure(self):
         with self.assertRaises(ValueError):
-            self._call_fut(u'some-database-id')
+            self._call_fut(u"some-database-id")
 
 
 class Test__add_id_or_name(unittest.TestCase):
-
     @staticmethod
     def _call_fut(flat_path, element_pb, empty_allowed):
         from google.cloud.datastore.key import _add_id_or_name
 
@@ -602,7 +602,7 @@ def test_add_id(self):
 
     def test_add_name(self):
         flat_path = []
-        name = 'moon-shadow'
+        name = "moon-shadow"
         element_pb = _make_element_pb(name=name)
 
         ret_val = self._call_fut(flat_path, element_pb, False)
@@ -613,7 +613,7 @@ def test_add_name(self):
         self.assertEqual(flat_path, [name, name])
 
     def test_both_present(self):
-        element_pb = _make_element_pb(id=17, name='seventeen')
+        element_pb = _make_element_pb(id=17, name="seventeen")
         flat_path = []
         with self.assertRaises(ValueError):
             self._call_fut(flat_path, element_pb, False)
@@ -639,7 +639,6 @@ def test_both_empty_allowed(self):
 
 
 class Test__get_flat_path(unittest.TestCase):
-
     @staticmethod
     def _call_fut(path_pb):
         from google.cloud.datastore.key import _get_flat_path
 
@@ -647,20 +646,20 @@ def _call_fut(path_pb):
         return _get_flat_path(path_pb)
 
     def test_one_pair(self):
-        kind = 'Widget'
-        name = 'Scooter'
+        kind = "Widget"
+        name = "Scooter"
         element_pb = _make_element_pb(type=kind, name=name)
 
         path_pb = _make_path_pb(element_pb)
         flat_path = self._call_fut(path_pb)
         self.assertEqual(flat_path, (kind, name))
 
     def test_two_pairs(self):
-        kind1 = 'parent'
+        kind1 = "parent"
         id1 = 59
         element_pb1 = _make_element_pb(type=kind1, id=id1)
 
-        kind2 = 'child'
-        name2 = 'naem'
+        kind2 = "child"
+        name2 = "naem"
         element_pb2 = _make_element_pb(type=kind2, name=name2)
 
         path_pb = _make_path_pb(element_pb1, element_pb2)
@@ -668,15 +667,15 @@ def test_two_pairs(self):
         self.assertEqual(flat_path, (kind1, id1, kind2, name2))
 
     def test_partial_key(self):
-        kind1 = 'grandparent'
-        name1 = 'cats'
+        kind1 = "grandparent"
+        name1 = "cats"
         element_pb1 = _make_element_pb(type=kind1, name=name1)
 
-        kind2 = 'parent'
+        kind2 = "parent"
         id2 = 1337
         element_pb2 = _make_element_pb(type=kind2, id=id2)
 
-        kind3 = 'child'
+        kind3 = "child"
         element_pb3 = _make_element_pb(type=kind3)
 
         path_pb = _make_path_pb(element_pb1, element_pb2, element_pb3)
@@ -685,7 +684,6 @@
 
 
 class Test__to_legacy_path(unittest.TestCase):
-
     @staticmethod
     def _call_fut(dict_path):
         from google.cloud.datastore.key import _to_legacy_path
 
@@ -693,9 +691,9 @@ def _call_fut(dict_path):
         return _to_legacy_path(dict_path)
 
     def test_one_pair(self):
-        kind = 'Widget'
-        name = 'Scooter'
-        dict_path = [{'kind': kind, 'name': name}]
+        kind = "Widget"
+        name = "Scooter"
+        dict_path = [{"kind": kind, "name": name}]
         path_pb = self._call_fut(dict_path)
 
         element_pb = _make_element_pb(type=kind, name=name)
@@ -703,14 +701,13 @@ def test_one_pair(self):
         self.assertEqual(path_pb, expected_pb)
 
     def test_two_pairs(self):
-        kind1 = 'parent'
+        kind1 = "parent"
         id1 = 59
 
-        kind2 = 'child'
-        name2 = 'naem'
+        kind2 = "child"
+        name2 = "naem"
 
-        dict_path = [
-            {'kind': kind1, 'id': id1}, {'kind': kind2, 'name': name2}]
+        dict_path = [{"kind": kind1, "id": id1}, {"kind": kind2, "name": name2}]
         path_pb = self._call_fut(dict_path)
 
         element_pb1 = _make_element_pb(type=kind1, id=id1)
@@ -719,18 +716,18 @@ def test_two_pairs(self):
         self.assertEqual(path_pb, expected_pb)
 
     def test_partial_key(self):
-        kind1 = 'grandparent'
-        name1 = 'cats'
+        kind1 = "grandparent"
+        name1 = "cats"
 
-        kind2 = 'parent'
+        kind2 = "parent"
         id2 = 1337
 
-        kind3 = 'child'
+        kind3 = "child"
 
         dict_path = [
-            {'kind': kind1, 'name': name1},
-            {'kind': kind2, 'id': id2},
-            {'kind': kind3},
+            {"kind": kind1, "name": name1},
+            {"kind": kind2, "id": id2},
+            {"kind": kind3},
         ]
         path_pb = self._call_fut(dict_path)
 
diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py
index 8689bbb1e745..01a005f4eb78 100644
--- a/packages/google-cloud-datastore/tests/unit/test_query.py
+++ b/packages/google-cloud-datastore/tests/unit/test_query.py
@@ -19,7 +19,7 @@
 
 
 class TestQuery(unittest.TestCase):
-    _PROJECT = 'PROJECT'
+    _PROJECT = "PROJECT"
 
     @staticmethod
     def _get_target_class():
@@ -49,15 +49,15 @@ def test_ctor_defaults(self):
 
     def test_ctor_explicit(self):
         from google.cloud.datastore.key import Key
 
-        _PROJECT = 'OTHER_PROJECT'
-        _KIND = 'KIND'
-        _NAMESPACE = 'OTHER_NAMESPACE'
+        _PROJECT = "OTHER_PROJECT"
+        _KIND = "KIND"
+        _NAMESPACE = "OTHER_NAMESPACE"
         client = self._make_client()
-        ancestor = Key('ANCESTOR', 123, project=_PROJECT)
-        FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)]
-        PROJECTION = ['foo', 'bar', 'baz']
-        ORDER = ['foo', 'bar']
-        DISTINCT_ON = ['foo']
+        ancestor = Key("ANCESTOR", 123, project=_PROJECT)
+        FILTERS = [("foo", "=", "Qux"), ("bar", "<", 17)]
+        PROJECTION = ["foo", "bar", "baz"]
+        ORDER = ["foo", "bar"]
+        DISTINCT_ON = ["foo"]
         query = self._make_one(
             client,
             kind=_KIND,
@@ -81,23 +81,27 @@ def test_ctor_explicit(self):
 
     def test_ctor_bad_projection(self):
         BAD_PROJECTION = object()
-        self.assertRaises(TypeError, self._make_one, self._make_client(),
-                          projection=BAD_PROJECTION)
+        self.assertRaises(
+            TypeError, self._make_one, self._make_client(), projection=BAD_PROJECTION
+        )
 
     def test_ctor_bad_order(self):
         BAD_ORDER = object()
-        self.assertRaises(TypeError, self._make_one, self._make_client(),
-                          order=BAD_ORDER)
+        self.assertRaises(
+            TypeError, self._make_one, self._make_client(), order=BAD_ORDER
+        )
 
     def test_ctor_bad_distinct_on(self):
         BAD_DISTINCT_ON = object()
-        self.assertRaises(TypeError, self._make_one, self._make_client(),
-                          distinct_on=BAD_DISTINCT_ON)
+        self.assertRaises(
+            TypeError, self._make_one, self._make_client(), distinct_on=BAD_DISTINCT_ON
+        )
 
     def test_ctor_bad_filters(self):
-        FILTERS_CANT_UNPACK = [('one', 'two')]
-        self.assertRaises(ValueError, self._make_one, self._make_client(),
-                          filters=FILTERS_CANT_UNPACK)
+        FILTERS_CANT_UNPACK = [("one", "two")]
+        self.assertRaises(
+            ValueError, self._make_one, self._make_client(), filters=FILTERS_CANT_UNPACK
+        )
 
     def test_namespace_setter_w_non_string(self):
         query = self._make_one(self._make_client())
@@ -108,7 +112,7 @@ def _assign(val):
         self.assertRaises(ValueError, _assign, object())
 
     def test_namespace_setter(self):
-        _NAMESPACE = 'OTHER_NAMESPACE'
+        _NAMESPACE = "OTHER_NAMESPACE"
         query = self._make_one(self._make_client())
         query.namespace = _NAMESPACE
         self.assertEqual(query.namespace, _NAMESPACE)
@@ -122,14 +126,14 @@ def _assign(val):
         self.assertRaises(TypeError, _assign, object())
 
     def test_kind_setter_wo_existing(self):
-        _KIND = 'KIND'
+        _KIND = "KIND"
         query = self._make_one(self._make_client())
         query.kind = _KIND
         self.assertEqual(query.kind, _KIND)
 
     def test_kind_setter_w_existing(self):
-        _KIND_BEFORE = 'KIND_BEFORE'
-        _KIND_AFTER = 'KIND_AFTER'
+        _KIND_BEFORE = "KIND_BEFORE"
+        _KIND_AFTER = "KIND_AFTER"
         query = self._make_one(self._make_client(), kind=_KIND_BEFORE)
         self.assertEqual(query.kind, _KIND_BEFORE)
         query.kind = _KIND_AFTER
@@ -143,85 +147,84 @@ def _assign(val):
             query.ancestor = val
 
         self.assertRaises(TypeError, _assign, object())
-        self.assertRaises(TypeError, _assign, ['KIND', 'NAME'])
+        self.assertRaises(TypeError, _assign, ["KIND", "NAME"])
 
     def test_ancestor_setter_w_key(self):
         from google.cloud.datastore.key import Key
 
-        _NAME = u'NAME'
-        key = Key('KIND', 123, project=self._PROJECT)
+        _NAME = u"NAME"
+        key = Key("KIND", 123, project=self._PROJECT)
         query = self._make_one(self._make_client())
-        query.add_filter('name', '=', _NAME)
+        query.add_filter("name", "=", _NAME)
         query.ancestor = key
         self.assertEqual(query.ancestor.path, key.path)
 
     def test_ancestor_deleter_w_key(self):
         from google.cloud.datastore.key import Key
 
-        key = Key('KIND', 123, project=self._PROJECT)
+        key = Key("KIND", 123, project=self._PROJECT)
         query = self._make_one(client=self._make_client(), ancestor=key)
         del query.ancestor
         self.assertIsNone(query.ancestor)
 
     def test_add_filter_setter_w_unknown_operator(self):
         query = self._make_one(self._make_client())
-        self.assertRaises(ValueError, query.add_filter,
-                          'firstname', '~~', 'John')
+        self.assertRaises(ValueError, query.add_filter, "firstname", "~~", "John")
 
     def test_add_filter_w_known_operator(self):
         query = self._make_one(self._make_client())
-        query.add_filter('firstname', '=', u'John')
-        self.assertEqual(query.filters, [('firstname', '=', u'John')])
+        query.add_filter("firstname", "=", u"John")
+        self.assertEqual(query.filters, [("firstname", "=", u"John")])
 
     def test_add_filter_w_all_operators(self):
         query = self._make_one(self._make_client())
-        query.add_filter('leq_prop', '<=', u'val1')
-        query.add_filter('geq_prop', '>=', u'val2')
-        query.add_filter('lt_prop', '<', u'val3')
-        query.add_filter('gt_prop', '>', u'val4')
-        query.add_filter('eq_prop', '=', u'val5')
+        query.add_filter("leq_prop", "<=", u"val1")
+        query.add_filter("geq_prop", ">=", u"val2")
+        query.add_filter("lt_prop", "<", u"val3")
+        query.add_filter("gt_prop", ">", u"val4")
+        query.add_filter("eq_prop", "=", u"val5")
         self.assertEqual(len(query.filters), 5)
-        self.assertEqual(query.filters[0], ('leq_prop', '<=', u'val1'))
-        self.assertEqual(query.filters[1], ('geq_prop', '>=', u'val2'))
-        self.assertEqual(query.filters[2], ('lt_prop', '<', u'val3'))
-        self.assertEqual(query.filters[3], ('gt_prop', '>', u'val4'))
-        self.assertEqual(query.filters[4], ('eq_prop', '=', u'val5'))
+        self.assertEqual(query.filters[0], ("leq_prop", "<=", u"val1"))
+        self.assertEqual(query.filters[1], ("geq_prop", ">=", u"val2"))
+        self.assertEqual(query.filters[2], ("lt_prop", "<", u"val3"))
+        self.assertEqual(query.filters[3], ("gt_prop", ">", u"val4"))
+        self.assertEqual(query.filters[4], ("eq_prop", "=", u"val5"))
 
     def test_add_filter_w_known_operator_and_entity(self):
         from google.cloud.datastore.entity import Entity
 
         query = self._make_one(self._make_client())
         other = Entity()
-        other['firstname'] = u'John'
-        other['lastname'] = u'Smith'
-        query.add_filter('other', '=', other)
-        self.assertEqual(query.filters, [('other', '=', other)])
+        other["firstname"] = u"John"
+        other["lastname"] = u"Smith"
+        query.add_filter("other", "=", other)
+        self.assertEqual(query.filters, [("other", "=", other)])
 
     def test_add_filter_w_whitespace_property_name(self):
         query = self._make_one(self._make_client())
-        PROPERTY_NAME = ' property with lots of space '
-        query.add_filter(PROPERTY_NAME, '=', u'John')
-        self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')])
+        PROPERTY_NAME = " property with lots of space "
+        query.add_filter(PROPERTY_NAME, "=", u"John")
+        self.assertEqual(query.filters, [(PROPERTY_NAME, "=", u"John")])
 
     def test_add_filter___key__valid_key(self):
         from google.cloud.datastore.key import Key
 
         query = self._make_one(self._make_client())
-        key = Key('Foo', project=self._PROJECT)
-        query.add_filter('__key__', '=', key)
-        self.assertEqual(query.filters, [('__key__', '=', key)])
+        key = Key("Foo", project=self._PROJECT)
+        query.add_filter("__key__", "=", key)
+        self.assertEqual(query.filters, [("__key__", "=", key)])
 
     def test_filter___key__not_equal_operator(self):
         from google.cloud.datastore.key import Key
 
-        key = Key('Foo', project=self._PROJECT)
+        key = Key("Foo", project=self._PROJECT)
         query = self._make_one(self._make_client())
-        query.add_filter('__key__', '<', key)
-        self.assertEqual(query.filters, [('__key__', '<', key)])
+        query.add_filter("__key__", "<", key)
+        self.assertEqual(query.filters, [("__key__", "<", key)])
 
     def test_filter___key__invalid_value(self):
         query = self._make_one(self._make_client())
-        self.assertRaises(ValueError, query.add_filter, '__key__', '=', None)
+        self.assertRaises(ValueError, query.add_filter, "__key__", "=", None)
 
     def test_projection_setter_empty(self):
         query = self._make_one(self._make_client())
@@ -230,17 +233,17 @@ def test_projection_setter_empty(self):
 
     def test_projection_setter_string(self):
         query = self._make_one(self._make_client())
-        query.projection = 'field1'
-        self.assertEqual(query.projection, ['field1'])
+        query.projection = "field1"
+        self.assertEqual(query.projection, ["field1"])
 
     def test_projection_setter_non_empty(self):
         query = self._make_one(self._make_client())
-        query.projection = ['field1', 'field2']
-        self.assertEqual(query.projection, ['field1', 'field2'])
+        query.projection = ["field1", "field2"]
+        self.assertEqual(query.projection, ["field1", "field2"])
 
     def test_projection_setter_multiple_calls(self):
-        _PROJECTION1 = ['field1', 'field2']
-        _PROJECTION2 = ['field3']
+        _PROJECTION1 = ["field1", "field2"]
+        _PROJECTION2 = ["field3"]
         query = self._make_one(self._make_client())
         query.projection = _PROJECTION1
         self.assertEqual(query.projection, _PROJECTION1)
@@ -250,7 +253,7 @@ def test_projection_setter_multiple_calls(self):
 
     def test_keys_only(self):
         query = self._make_one(self._make_client())
         query.keys_only()
-        self.assertEqual(query.projection, ['__key__'])
+        self.assertEqual(query.projection, ["__key__"])
 
     def test_key_filter_defaults(self):
         from google.cloud.datastore.key import Key
@@ -258,9 +261,9 @@ def test_key_filter_defaults(self):
         client = self._make_client()
         query = self._make_one(client)
         self.assertEqual(query.filters, [])
-        key = Key('Kind', 1234, project='project')
+        key = Key("Kind", 1234, project="project")
         query.key_filter(key)
-        self.assertEqual(query.filters, [('__key__', '=', key)])
+        self.assertEqual(query.filters, [("__key__", "=", key)])
 
     def test_key_filter_explicit(self):
         from google.cloud.datastore.key import Key
@@ -268,48 +271,48 @@ def test_key_filter_explicit(self):
         client = self._make_client()
         query = self._make_one(client)
         self.assertEqual(query.filters, [])
-        key = Key('Kind', 1234, project='project')
-        query.key_filter(key, operator='>')
-        self.assertEqual(query.filters, [('__key__', '>', key)])
+        key = Key("Kind", 1234, project="project")
+        query.key_filter(key, operator=">")
+        self.assertEqual(query.filters, [("__key__", ">", key)])
 
     def test_order_setter_empty(self):
-        query = self._make_one(self._make_client(), order=['foo', '-bar'])
+        query = self._make_one(self._make_client(), order=["foo", "-bar"])
         query.order = []
         self.assertEqual(query.order, [])
 
     def test_order_setter_string(self):
         query = self._make_one(self._make_client())
self._make_one(self._make_client()) - query.order = 'field' - self.assertEqual(query.order, ['field']) + query.order = "field" + self.assertEqual(query.order, ["field"]) def test_order_setter_single_item_list_desc(self): query = self._make_one(self._make_client()) - query.order = ['-field'] - self.assertEqual(query.order, ['-field']) + query.order = ["-field"] + self.assertEqual(query.order, ["-field"]) def test_order_setter_multiple(self): query = self._make_one(self._make_client()) - query.order = ['foo', '-bar'] - self.assertEqual(query.order, ['foo', '-bar']) + query.order = ["foo", "-bar"] + self.assertEqual(query.order, ["foo", "-bar"]) def test_distinct_on_setter_empty(self): - query = self._make_one(self._make_client(), distinct_on=['foo', 'bar']) + query = self._make_one(self._make_client(), distinct_on=["foo", "bar"]) query.distinct_on = [] self.assertEqual(query.distinct_on, []) def test_distinct_on_setter_string(self): query = self._make_one(self._make_client()) - query.distinct_on = 'field1' - self.assertEqual(query.distinct_on, ['field1']) + query.distinct_on = "field1" + self.assertEqual(query.distinct_on, ["field1"]) def test_distinct_on_setter_non_empty(self): query = self._make_one(self._make_client()) - query.distinct_on = ['field1', 'field2'] - self.assertEqual(query.distinct_on, ['field1', 'field2']) + query.distinct_on = ["field1", "field2"] + self.assertEqual(query.distinct_on, ["field1", "field2"]) def test_distinct_on_multiple_calls(self): - _DISTINCT_ON1 = ['field1', 'field2'] - _DISTINCT_ON2 = ['field3'] + _DISTINCT_ON1 = ["field1", "field2"] + _DISTINCT_ON2 = ["field3"] query = self._make_one(self._make_client()) query.distinct_on = _DISTINCT_ON1 self.assertEqual(query.distinct_on, _DISTINCT_ON1) @@ -344,7 +347,6 @@ def test_fetch_w_explicit_client(self): class TestIterator(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.datastore.query import Iterator @@ -363,7 +365,7 @@ def test_constructor_defaults(self): self.assertIs(iterator.client, client) self.assertIsNone(iterator.max_results) self.assertEqual(iterator.page_number, 0) - self.assertIsNone(iterator.next_page_token,) + self.assertIsNone(iterator.next_page_token) self.assertEqual(iterator.num_results, 0) self.assertIs(iterator._query, query) self.assertIsNone(iterator._offset) @@ -375,11 +377,16 @@ def test_constructor_explicit(self): client = object() limit = 43 offset = 9 - start_cursor = b'8290\xff' - end_cursor = b'so20rc\ta' + start_cursor = b"8290\xff" + end_cursor = b"so20rc\ta" iterator = self._make_one( - query, client, limit=limit, offset=offset, - start_cursor=start_cursor, end_cursor=end_cursor) + query, + client, + limit=limit, + offset=offset, + start_cursor=start_cursor, + end_cursor=end_cursor, + ) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) @@ -414,26 +421,19 @@ def test__build_protobuf_all_values_except_offset(self): client = _Client(None) query = Query(client) limit = 15 - start_bytes = b'i\xb7\x1d' - start_cursor = 'abcd' - end_bytes = b'\xc3\x1c\xb3' - end_cursor = 'wxyz' + start_bytes = b"i\xb7\x1d" + start_cursor = "abcd" + end_bytes = b"\xc3\x1c\xb3" + end_cursor = "wxyz" iterator = self._make_one( - query, - client, - limit=limit, - start_cursor=start_cursor, - end_cursor=end_cursor + query, client, limit=limit, start_cursor=start_cursor, end_cursor=end_cursor ) self.assertEqual(iterator.max_results, limit) iterator.num_results = 4 iterator._skipped_results = 1 pb = iterator._build_protobuf() - expected_pb = 
query_pb2.Query( - start_cursor=start_bytes, - end_cursor=end_bytes, - ) + expected_pb = query_pb2.Query(start_cursor=start_bytes, end_cursor=end_bytes) expected_pb.limit.value = limit - iterator.num_results self.assertEqual(pb, expected_pb) @@ -448,38 +448,29 @@ def test__build_protobuf_all_values_except_start_and_end_cursor(self): query = Query(client) limit = 15 offset = 9 - iterator = self._make_one( - query, - client, - limit=limit, - offset=offset, - ) + iterator = self._make_one(query, client, limit=limit, offset=offset) self.assertEqual(iterator.max_results, limit) iterator.num_results = 4 pb = iterator._build_protobuf() - expected_pb = query_pb2.Query( - offset=offset - iterator._skipped_results, - ) + expected_pb = query_pb2.Query(offset=offset - iterator._skipped_results) expected_pb.limit.value = limit - iterator.num_results self.assertEqual(pb, expected_pb) def test__process_query_results(self): from google.cloud.datastore_v1.proto import query_pb2 - iterator = self._make_one(None, None, - end_cursor='abcd') + iterator = self._make_one(None, None, end_cursor="abcd") self.assertIsNotNone(iterator._end_cursor) - entity_pbs = [ - _make_entity('Hello', 9998, 'PRAHJEKT'), - ] - cursor_as_bytes = b'\x9ai\xe7' - cursor = b'mmnn' + entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT")] + cursor_as_bytes = b"\x9ai\xe7" + cursor = b"mmnn" skipped_results = 4 more_results_enum = query_pb2.QueryResultBatch.NOT_FINISHED response_pb = _make_query_response( - entity_pbs, cursor_as_bytes, more_results_enum, skipped_results) + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results + ) result = iterator._process_query_results(response_pb) self.assertEqual(result, entity_pbs) @@ -490,18 +481,16 @@ def test__process_query_results(self): def test__process_query_results_done(self): from google.cloud.datastore_v1.proto import query_pb2 - iterator = self._make_one(None, None, - end_cursor='abcd') + iterator = self._make_one(None, None, end_cursor="abcd") self.assertIsNotNone(iterator._end_cursor) - entity_pbs = [ - _make_entity('World', 1234, 'PROJECT'), - ] - cursor_as_bytes = b'\x9ai\xe7' + entity_pbs = [_make_entity("World", 1234, "PROJECT")] + cursor_as_bytes = b"\x9ai\xe7" skipped_results = 44 more_results_enum = query_pb2.QueryResultBatch.NO_MORE_RESULTS response_pb = _make_query_response( - entity_pbs, cursor_as_bytes, more_results_enum, skipped_results) + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results + ) result = iterator._process_query_results(response_pb) self.assertEqual(result, entity_pbs) @@ -512,8 +501,7 @@ def test__process_query_results_done(self): def test__process_query_results_bad_enum(self): iterator = self._make_one(None, None) more_results_enum = 999 - response_pb = _make_query_response( - [], b'', more_results_enum, 0) + response_pb = _make_query_response([], b"", more_results_enum, 0) with self.assertRaises(ValueError): iterator._process_query_results(response_pb) @@ -525,15 +513,14 @@ def _next_page_helper(self, txn_id=None): from google.cloud.datastore.query import Query more_enum = query_pb2.QueryResultBatch.NOT_FINISHED - result = _make_query_response([], b'', more_enum, 0) - project = 'prujekt' + result = _make_query_response([], b"", more_enum, 0) + project = "prujekt" ds_api = _make_datastore_api(result) if txn_id is None: client = _Client(project, datastore_api=ds_api) else: - transaction = mock.Mock(id=txn_id, spec=['id']) - client = _Client( - project, datastore_api=ds_api, transaction=transaction) + transaction = mock.Mock(id=txn_id, 
spec=["id"]) + client = _Client(project, datastore_api=ds_api, transaction=transaction) query = Query(client) iterator = self._make_one(query, client) @@ -549,13 +536,14 @@ def _next_page_helper(self, txn_id=None): read_options = datastore_pb2.ReadOptions(transaction=txn_id) empty_query = query_pb2.Query() ds_api.run_query.assert_called_once_with( - project, partition_id, read_options, query=empty_query) + project, partition_id, read_options, query=empty_query + ) def test__next_page(self): self._next_page_helper() def test__next_page_in_transaction(self): - txn_id = b'1xo1md\xe2\x98\x83' + txn_id = b"1xo1md\xe2\x98\x83" self._next_page_helper(txn_id) def test__next_page_no_more(self): @@ -573,7 +561,6 @@ def test__next_page_no_more(self): class Test__item_to_entity(unittest.TestCase): - def _call_fut(self, iterator, entity_pb): from google.cloud.datastore.query import _item_to_entity @@ -581,8 +568,7 @@ def _call_fut(self, iterator, entity_pb): def test_it(self): entity_pb = mock.sentinel.entity_pb - patch = mock.patch( - 'google.cloud.datastore.helpers.entity_from_protobuf') + patch = mock.patch("google.cloud.datastore.helpers.entity_from_protobuf") with patch as entity_from_protobuf: result = self._call_fut(None, entity_pb) self.assertIs(result, entity_from_protobuf.return_value) @@ -591,7 +577,6 @@ def test_it(self): class Test__pb_from_query(unittest.TestCase): - def _call_fut(self, query): from google.cloud.datastore.query import _pb_from_query @@ -605,101 +590,100 @@ def test_empty(self): self.assertEqual(list(pb.kind), []) self.assertEqual(list(pb.order), []) self.assertEqual(list(pb.distinct_on), []) - self.assertEqual(pb.filter.property_filter.property.name, '') + self.assertEqual(pb.filter.property_filter.property.name, "") cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.op, - query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED) + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED) self.assertEqual(list(cfilter.filters), []) - self.assertEqual(pb.start_cursor, b'') - self.assertEqual(pb.end_cursor, b'') + self.assertEqual(pb.start_cursor, b"") + self.assertEqual(pb.end_cursor, b"") self.assertEqual(pb.limit.value, 0) self.assertEqual(pb.offset, 0) def test_projection(self): - pb = self._call_fut(_Query(projection=['a', 'b', 'c'])) - self.assertEqual([item.property.name for item in pb.projection], - ['a', 'b', 'c']) + pb = self._call_fut(_Query(projection=["a", "b", "c"])) + self.assertEqual( + [item.property.name for item in pb.projection], ["a", "b", "c"] + ) def test_kind(self): - pb = self._call_fut(_Query(kind='KIND')) - self.assertEqual([item.name for item in pb.kind], ['KIND']) + pb = self._call_fut(_Query(kind="KIND")) + self.assertEqual([item.name for item in pb.kind], ["KIND"]) def test_ancestor(self): from google.cloud.datastore.key import Key from google.cloud.datastore_v1.proto import query_pb2 - ancestor = Key('Ancestor', 123, project='PROJECT') + ancestor = Key("Ancestor", 123, project="PROJECT") pb = self._call_fut(_Query(ancestor=ancestor)) cfilter = pb.filter.composite_filter self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) self.assertEqual(len(cfilter.filters), 1) pfilter = cfilter.filters[0].property_filter - self.assertEqual(pfilter.property.name, '__key__') + self.assertEqual(pfilter.property.name, "__key__") ancestor_pb = ancestor.to_protobuf() self.assertEqual(pfilter.value.key_value, ancestor_pb) def test_filter(self): from google.cloud.datastore_v1.proto import query_pb2 - query = _Query(filters=[('name', 
'=', u'John')]) - query.OPERATORS = { - '=': query_pb2.PropertyFilter.EQUAL, - } + query = _Query(filters=[("name", "=", u"John")]) + query.OPERATORS = {"=": query_pb2.PropertyFilter.EQUAL} pb = self._call_fut(query) cfilter = pb.filter.composite_filter self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) self.assertEqual(len(cfilter.filters), 1) pfilter = cfilter.filters[0].property_filter - self.assertEqual(pfilter.property.name, 'name') - self.assertEqual(pfilter.value.string_value, u'John') + self.assertEqual(pfilter.property.name, "name") + self.assertEqual(pfilter.value.string_value, u"John") def test_filter_key(self): from google.cloud.datastore.key import Key from google.cloud.datastore_v1.proto import query_pb2 - key = Key('Kind', 123, project='PROJECT') - query = _Query(filters=[('__key__', '=', key)]) - query.OPERATORS = { - '=': query_pb2.PropertyFilter.EQUAL, - } + key = Key("Kind", 123, project="PROJECT") + query = _Query(filters=[("__key__", "=", key)]) + query.OPERATORS = {"=": query_pb2.PropertyFilter.EQUAL} pb = self._call_fut(query) cfilter = pb.filter.composite_filter self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) self.assertEqual(len(cfilter.filters), 1) pfilter = cfilter.filters[0].property_filter - self.assertEqual(pfilter.property.name, '__key__') + self.assertEqual(pfilter.property.name, "__key__") key_pb = key.to_protobuf() self.assertEqual(pfilter.value.key_value, key_pb) def test_order(self): from google.cloud.datastore_v1.proto import query_pb2 - pb = self._call_fut(_Query(order=['a', '-b', 'c'])) - self.assertEqual([item.property.name for item in pb.order], - ['a', 'b', 'c']) - self.assertEqual([item.direction for item in pb.order], - [query_pb2.PropertyOrder.ASCENDING, - query_pb2.PropertyOrder.DESCENDING, - query_pb2.PropertyOrder.ASCENDING]) + pb = self._call_fut(_Query(order=["a", "-b", "c"])) + self.assertEqual([item.property.name for item in pb.order], ["a", "b", "c"]) + self.assertEqual( + [item.direction for item in pb.order], + [ + query_pb2.PropertyOrder.ASCENDING, + query_pb2.PropertyOrder.DESCENDING, + query_pb2.PropertyOrder.ASCENDING, + ], + ) def test_distinct_on(self): - pb = self._call_fut(_Query(distinct_on=['a', 'b', 'c'])) - self.assertEqual([item.name for item in pb.distinct_on], - ['a', 'b', 'c']) + pb = self._call_fut(_Query(distinct_on=["a", "b", "c"])) + self.assertEqual([item.name for item in pb.distinct_on], ["a", "b", "c"]) class _Query(object): - - def __init__(self, - client=object(), - kind=None, - project=None, - namespace=None, - ancestor=None, - filters=(), - projection=(), - order=(), - distinct_on=()): + def __init__( + self, + client=object(), + kind=None, + project=None, + namespace=None, + ancestor=None, + filters=(), + projection=(), + order=(), + distinct_on=(), + ): self._client = client self.kind = kind self.project = project @@ -712,9 +696,7 @@ def __init__(self, class _Client(object): - - def __init__(self, project, datastore_api=None, namespace=None, - transaction=None): + def __init__(self, project, datastore_api=None, namespace=None, transaction=None): self.project = project self._datastore_api = datastore_api self.namespace = namespace @@ -737,7 +719,8 @@ def _make_entity(kind, id_, project): def _make_query_response( - entity_pbs, cursor_as_bytes, more_results_enum, skipped_results): + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results +): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore_v1.proto import query_pb2 @@ -747,13 +730,12 @@ def 
_make_query_response( end_cursor=cursor_as_bytes, more_results=more_results_enum, entity_results=[ - query_pb2.EntityResult(entity=entity) - for entity in entity_pbs + query_pb2.EntityResult(entity=entity) for entity in entity_pbs ], - ), + ) ) def _make_datastore_api(result=None): run_query = mock.Mock(return_value=result, spec=[]) - return mock.Mock(run_query=run_query, spec=['run_query']) + return mock.Mock(run_query=run_query, spec=["run_query"]) diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index cef178a00243..a1e23610368a 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -18,14 +18,15 @@ class TestTransaction(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.datastore.transaction import Transaction + return Transaction def _get_options_class(self, **kw): from google.cloud.datastore_v1.types import TransactionOptions + return TransactionOptions def _make_one(self, client, **kw): @@ -35,7 +36,7 @@ def _make_options(self, **kw): return self._get_options_class()(**kw) def test_ctor_defaults(self): - project = 'PROJECT' + project = "PROJECT" client = _Client(project) xact = self._make_one(client) self.assertEqual(xact.project, project) @@ -48,7 +49,7 @@ def test_ctor_defaults(self): def test_current(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' + project = "PROJECT" id_ = 678 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -84,7 +85,7 @@ def test_current(self): begin_txn.assert_called_with(project) def test_begin(self): - project = 'PROJECT' + project = "PROJECT" id_ = 889 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -94,7 +95,7 @@ def test_begin(self): ds_api.begin_transaction.assert_called_once_with(project) def test_begin_tombstoned(self): - project = 'PROJECT' + project = "PROJECT" id_ = 1094 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -110,7 +111,7 @@ def test_begin_tombstoned(self): self.assertRaises(ValueError, xact.begin) def test_begin_w_begin_transaction_failure(self): - project = 'PROJECT' + project = "PROJECT" id_ = 712 ds_api = _make_datastore_api(xact_id=id_) ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[]) @@ -124,7 +125,7 @@ def test_begin_w_begin_transaction_failure(self): ds_api.begin_transaction.assert_called_once_with(project) def test_rollback(self): - project = 'PROJECT' + project = "PROJECT" id_ = 239 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -138,7 +139,7 @@ def test_rollback(self): def test_commit_no_partial_keys(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' + project = "PROJECT" id_ = 1002930 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -148,15 +149,16 @@ def test_commit_no_partial_keys(self): mode = datastore_pb2.CommitRequest.TRANSACTIONAL client._datastore_api.commit.assert_called_once_with( - project, mode, [], transaction=id_) + project, mode, [], transaction=id_ + ) self.assertIsNone(xact.id) ds_api.begin_transaction.assert_called_once_with(project) def test_commit_w_partial_keys(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' - kind = 'KIND' + project = 
"PROJECT" + kind = "KIND" id1 = 123 key = _make_key(kind, id1, project) id2 = 234 @@ -170,15 +172,16 @@ def test_commit_w_partial_keys(self): mode = datastore_pb2.CommitRequest.TRANSACTIONAL ds_api.commit.assert_called_once_with( - project, mode, xact.mutations, transaction=id2) + project, mode, xact.mutations, transaction=id2 + ) self.assertIsNone(xact.id) - self.assertEqual(entity.key.path, [{'kind': kind, 'id': id1}]) + self.assertEqual(entity.key.path, [{"kind": kind, "id": id1}]) ds_api.begin_transaction.assert_called_once_with(project) def test_context_manager_no_raise(self): from google.cloud.datastore_v1.proto import datastore_pb2 - project = 'PROJECT' + project = "PROJECT" id_ = 912830 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -189,16 +192,16 @@ def test_context_manager_no_raise(self): mode = datastore_pb2.CommitRequest.TRANSACTIONAL client._datastore_api.commit.assert_called_once_with( - project, mode, [], transaction=id_) + project, mode, [], transaction=id_ + ) self.assertIsNone(xact.id) self.assertEqual(ds_api.begin_transaction.call_count, 1) def test_context_manager_w_raise(self): - class Foo(Exception): pass - project = 'PROJECT' + project = "PROJECT" id_ = 614416 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -211,15 +214,14 @@ class Foo(Exception): raise Foo() except Foo: self.assertIsNone(xact.id) - client._datastore_api.rollback.assert_called_once_with( - project, id_) + client._datastore_api.rollback.assert_called_once_with(project, id_) client._datastore_api.commit.assert_not_called() self.assertIsNone(xact.id) self.assertEqual(ds_api.begin_transaction.call_count, 1) def test_constructor_read_only(self): - project = 'PROJECT' + project = "PROJECT" id_ = 850302 ds_api = _make_datastore_api(xact=id_) client = _Client(project, datastore_api=ds_api) @@ -229,7 +231,7 @@ def test_constructor_read_only(self): self.assertEqual(xact._options, options) def test_put_read_only(self): - project = 'PROJECT' + project = "PROJECT" id_ = 943243 ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) @@ -252,16 +254,14 @@ def _make_key(kind, id_, project): class _Entity(dict): - def __init__(self): super(_Entity, self).__init__() from google.cloud.datastore.key import Key - self.key = Key('KIND', project='PROJECT') + self.key = Key("KIND", project="PROJECT") class _Client(object): - def __init__(self, project, datastore_api=None, namespace=None): self.project = project if datastore_api is None: @@ -282,7 +282,6 @@ def current_batch(self): class _NoCommitBatch(object): - def __init__(self, client): from google.cloud.datastore.batch import Batch @@ -300,20 +299,19 @@ def __exit__(self, *args): def _make_commit_response(*keys): from google.cloud.datastore_v1.proto import datastore_pb2 - mutation_results = [ - datastore_pb2.MutationResult(key=key) for key in keys] + mutation_results = [datastore_pb2.MutationResult(key=key) for key in keys] return datastore_pb2.CommitResponse(mutation_results=mutation_results) def _make_datastore_api(*keys, **kwargs): - commit_method = mock.Mock( - return_value=_make_commit_response(*keys), spec=[]) + commit_method = mock.Mock(return_value=_make_commit_response(*keys), spec=[]) - xact_id = kwargs.pop('xact_id', 123) - txn_pb = mock.Mock( - transaction=xact_id, spec=['transaction']) + xact_id = kwargs.pop("xact_id", 123) + txn_pb = mock.Mock(transaction=xact_id, spec=["transaction"]) begin_txn = mock.Mock(return_value=txn_pb, 
spec=[]) return mock.Mock( - commit=commit_method, begin_transaction=begin_txn, - spec=['begin_transaction', 'commit', 'rollback']) + commit=commit_method, + begin_transaction=begin_txn, + spec=["begin_transaction", "commit", "rollback"], + ) From 959b216730e2cee60909327be14df677feb1b2da Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Thu, 29 Nov 2018 11:21:34 -0800 Subject: [PATCH 198/611] Unblack datastore gapic and protos. --- .../datastore_v1/gapic/datastore_client.py | 346 +++++++++--------- .../gapic/datastore_client_config.py | 22 +- .../google/cloud/datastore_v1/gapic/enums.py | 8 - .../transports/datastore_grpc_transport.py | 47 ++- 4 files changed, 204 insertions(+), 219 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index cd1ba4582a63..d1ebd3c925bf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -35,8 +35,7 @@ from google.cloud.datastore_v1.proto import query_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datastore" -).version + 'google-cloud-datastore', ).version class DatastoreClient(object): @@ -49,12 +48,12 @@ class DatastoreClient(object): the request. """ - SERVICE_ADDRESS = "datastore.googleapis.com:443" + SERVICE_ADDRESS = 'datastore.googleapis.com:443' """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.datastore.v1.Datastore" + _INTERFACE_NAME = 'google.datastore.v1.Datastore' @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -70,20 +69,19 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: DatastoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - ): + def __init__(self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None): """Constructor. Args: @@ -117,19 +115,18 @@ def __init__( # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - "The `client_config` argument is deprecated.", + 'The `client_config` argument is deprecated.', PendingDeprecationWarning, - stacklevel=2, - ) + stacklevel=2) else: client_config = datastore_client_config.config if channel: warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning, - stacklevel=2, - ) + stacklevel=2) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -138,24 +135,25 @@ def __init__( if callable(transport): self.transport = transport( credentials=credentials, - default_class=datastore_grpc_transport.DatastoreGrpcTransport, + default_class=datastore_grpc_transport. 
+ DatastoreGrpcTransport, ) else: if credentials: raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') self.transport = transport else: self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=self.SERVICE_ADDRESS, + channel=channel, + credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -165,8 +163,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) + client_config['interfaces'][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. # These are the actual callables which invoke the proper @@ -175,15 +172,13 @@ def __init__( self._inner_api_calls = {} # Service calls - def lookup( - self, - project_id, - keys, - read_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + def lookup(self, + project_id, + keys, + read_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Looks up entities by key. @@ -230,34 +225,32 @@ def lookup( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "lookup" not in self._inner_api_calls: + if 'lookup' not in self._inner_api_calls: self._inner_api_calls[ - "lookup" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.lookup, - default_retry=self._method_configs["Lookup"].retry, - default_timeout=self._method_configs["Lookup"].timeout, - client_info=self._client_info, - ) + 'lookup'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.lookup, + default_retry=self._method_configs['Lookup'].retry, + default_timeout=self._method_configs['Lookup'].timeout, + client_info=self._client_info, + ) request = datastore_pb2.LookupRequest( - project_id=project_id, keys=keys, read_options=read_options - ) - return self._inner_api_calls["lookup"]( - request, retry=retry, timeout=timeout, metadata=metadata + project_id=project_id, + keys=keys, + read_options=read_options, ) - - def run_query( - self, - project_id, - partition_id, - read_options=None, - query=None, - gql_query=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + return self._inner_api_calls['lookup']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def run_query(self, + project_id, + partition_id, + read_options=None, + query=None, + gql_query=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Queries for entities. @@ -315,19 +308,21 @@ def run_query( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
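As context for the hunks above: `lookup` lazily wraps the transport method with the configured retry/timeout on first call and caches it in `_inner_api_calls`. A minimal sketch of calling it through the GAPIC client; the project id, kind, and numeric id below are placeholders, and default application credentials are assumed:

    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.proto import entity_pb2

    client = datastore_v1.DatastoreClient()

    # A fully-specified key: a partition plus a path element with kind and id.
    key = entity_pb2.Key(
        partition_id=entity_pb2.PartitionId(project_id="my-project"),
        path=[entity_pb2.Key.PathElement(kind="Task", id=1234)],
    )
    response = client.lookup("my-project", [key])
    for result in response.found:  # LookupResponse splits found/missing/deferred
        print(result.entity.key)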
- if "run_query" not in self._inner_api_calls: + if 'run_query' not in self._inner_api_calls: self._inner_api_calls[ - "run_query" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_query, - default_retry=self._method_configs["RunQuery"].retry, - default_timeout=self._method_configs["RunQuery"].timeout, - client_info=self._client_info, - ) + 'run_query'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_query, + default_retry=self._method_configs['RunQuery'].retry, + default_timeout=self._method_configs['RunQuery'].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query) + google.api_core.protobuf_helpers.check_oneof( + query=query, + gql_query=gql_query, + ) request = datastore_pb2.RunQueryRequest( project_id=project_id, @@ -336,18 +331,15 @@ def run_query( query=query, gql_query=gql_query, ) - return self._inner_api_calls["run_query"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def begin_transaction( - self, - project_id, - transaction_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + return self._inner_api_calls['run_query']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def begin_transaction(self, + project_id, + transaction_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Begins a new transaction. @@ -387,33 +379,32 @@ def begin_transaction( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "begin_transaction" not in self._inner_api_calls: + if 'begin_transaction' not in self._inner_api_calls: self._inner_api_calls[ - "begin_transaction" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.begin_transaction, - default_retry=self._method_configs["BeginTransaction"].retry, - default_timeout=self._method_configs["BeginTransaction"].timeout, - client_info=self._client_info, - ) + 'begin_transaction'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.begin_transaction, + default_retry=self._method_configs['BeginTransaction']. + retry, + default_timeout=self._method_configs['BeginTransaction']. + timeout, + client_info=self._client_info, + ) request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, transaction_options=transaction_options - ) - return self._inner_api_calls["begin_transaction"]( - request, retry=retry, timeout=timeout, metadata=metadata + project_id=project_id, + transaction_options=transaction_options, ) - - def commit( - self, - project_id, - mode, - mutations, - transaction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + return self._inner_api_calls['begin_transaction']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def commit(self, + project_id, + mode, + mutations, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Commits a transaction, optionally creating, deleting or modifying some entities. @@ -477,19 +468,18 @@ def commit( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
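`run_query` treats `query` and `gql_query` as mutually exclusive and rejects requests that set both via `check_oneof`. A sketch of a simple kind query against the same client, with placeholder project and kind names:

    from google.cloud.datastore_v1.proto import entity_pb2, query_pb2

    partition_id = entity_pb2.PartitionId(project_id="my-project")
    query = query_pb2.Query(kind=[query_pb2.KindExpression(name="Task")])

    response = client.run_query("my-project", partition_id, query=query)
    for result in response.batch.entity_results:  # one QueryResultBatch per call
        print(result.entity.key)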
- if "commit" not in self._inner_api_calls: + if 'commit' not in self._inner_api_calls: self._inner_api_calls[ - "commit" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.commit, - default_retry=self._method_configs["Commit"].retry, - default_timeout=self._method_configs["Commit"].timeout, - client_info=self._client_info, - ) + 'commit'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.commit, + default_retry=self._method_configs['Commit'].retry, + default_timeout=self._method_configs['Commit'].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(transaction=transaction) + google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) request = datastore_pb2.CommitRequest( project_id=project_id, @@ -497,18 +487,15 @@ def commit( mutations=mutations, transaction=transaction, ) - return self._inner_api_calls["commit"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def rollback( - self, - project_id, - transaction, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + return self._inner_api_calls['commit']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def rollback(self, + project_id, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Rolls back a transaction. @@ -549,31 +536,28 @@ def rollback( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "rollback" not in self._inner_api_calls: + if 'rollback' not in self._inner_api_calls: self._inner_api_calls[ - "rollback" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.rollback, - default_retry=self._method_configs["Rollback"].retry, - default_timeout=self._method_configs["Rollback"].timeout, - client_info=self._client_info, - ) + 'rollback'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.rollback, + default_retry=self._method_configs['Rollback'].retry, + default_timeout=self._method_configs['Rollback'].timeout, + client_info=self._client_info, + ) request = datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction - ) - return self._inner_api_calls["rollback"]( - request, retry=retry, timeout=timeout, metadata=metadata + project_id=project_id, + transaction=transaction, ) - - def allocate_ids( - self, - project_id, - keys, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + return self._inner_api_calls['rollback']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def allocate_ids(self, + project_id, + keys, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -618,30 +602,30 @@ def allocate_ids( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
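`commit` takes a `CommitRequest.Mode`, the list of mutations, and optionally the transaction those mutations belong to. A sketch of a transactional upsert, where `entity` stands in for a populated `entity_pb2.Entity` and the project id is a placeholder:

    from google.cloud.datastore_v1.proto import datastore_pb2

    txn = client.begin_transaction("my-project").transaction
    mutation = datastore_pb2.Mutation(upsert=entity)
    response = client.commit(
        "my-project",
        datastore_pb2.CommitRequest.TRANSACTIONAL,
        [mutation],
        transaction=txn,
    )
    print(response.mutation_results[0].key)  # server-completed key, if any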
- if "allocate_ids" not in self._inner_api_calls: + if 'allocate_ids' not in self._inner_api_calls: self._inner_api_calls[ - "allocate_ids" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.allocate_ids, - default_retry=self._method_configs["AllocateIds"].retry, - default_timeout=self._method_configs["AllocateIds"].timeout, - client_info=self._client_info, - ) + 'allocate_ids'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.allocate_ids, + default_retry=self._method_configs['AllocateIds'].retry, + default_timeout=self._method_configs['AllocateIds']. + timeout, + client_info=self._client_info, + ) - request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys) - return self._inner_api_calls["allocate_ids"]( - request, retry=retry, timeout=timeout, metadata=metadata + request = datastore_pb2.AllocateIdsRequest( + project_id=project_id, + keys=keys, ) - - def reserve_ids( - self, - project_id, - keys, - database_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): + return self._inner_api_calls['allocate_ids']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def reserve_ids(self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -687,19 +671,19 @@ def reserve_ids( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "reserve_ids" not in self._inner_api_calls: + if 'reserve_ids' not in self._inner_api_calls: self._inner_api_calls[ - "reserve_ids" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reserve_ids, - default_retry=self._method_configs["ReserveIds"].retry, - default_timeout=self._method_configs["ReserveIds"].timeout, - client_info=self._client_info, - ) + 'reserve_ids'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.reserve_ids, + default_retry=self._method_configs['ReserveIds'].retry, + default_timeout=self._method_configs['ReserveIds'].timeout, + client_info=self._client_info, + ) request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, keys=keys, database_id=database_id - ) - return self._inner_api_calls["reserve_ids"]( - request, retry=retry, timeout=timeout, metadata=metadata + project_id=project_id, + keys=keys, + database_id=database_id, ) + return self._inner_api_calls['reserve_ids']( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index 95822b8babfc..1a3eb9523447 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -3,7 +3,7 @@ "google.datastore.v1.Datastore": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], + "non_idempotent": [] }, "retry_params": { "default": { @@ -13,46 +13,46 @@ "initial_rpc_timeout_millis": 60000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, + "total_timeout_millis": 600000 } }, "methods": { "Lookup": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - 
"retry_params_name": "default", + "retry_params_name": "default" }, "RunQuery": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default", + "retry_params_name": "default" }, "BeginTransaction": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_params_name": "default" }, "Commit": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_params_name": "default" }, "Rollback": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_params_name": "default" }, "AllocateIds": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_params_name": "default" }, "ReserveIds": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, + "retry_params_name": "default" + } + } } } } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 78de5345e377..41497f71781b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -28,7 +28,6 @@ class NullValue(enum.IntEnum): Attributes: NULL_VALUE (int): Null value. """ - NULL_VALUE = 0 @@ -48,7 +47,6 @@ class ResultType(enum.IntEnum): PROJECTION (int): A projected subset of properties. The entity may have no key. KEY_ONLY (int): Only the key. """ - RESULT_TYPE_UNSPECIFIED = 0 FULL = 1 PROJECTION = 2 @@ -65,7 +63,6 @@ class Direction(enum.IntEnum): ASCENDING (int): Ascending. DESCENDING (int): Descending. """ - DIRECTION_UNSPECIFIED = 0 ASCENDING = 1 DESCENDING = 2 @@ -80,7 +77,6 @@ class Operator(enum.IntEnum): OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. AND (int): The results are required to satisfy each of the combined filters. """ - OPERATOR_UNSPECIFIED = 0 AND = 1 @@ -99,7 +95,6 @@ class Operator(enum.IntEnum): EQUAL (int): Equal. HAS_ANCESTOR (int): Has ancestor. """ - OPERATOR_UNSPECIFIED = 0 LESS_THAN = 1 LESS_THAN_OR_EQUAL = 2 @@ -122,7 +117,6 @@ class MoreResultsType(enum.IntEnum): cursor. NO_MORE_RESULTS (int): The query is finished, and there are no more results. """ - MORE_RESULTS_TYPE_UNSPECIFIED = 0 NOT_FINISHED = 1 MORE_RESULTS_AFTER_LIMIT = 2 @@ -142,7 +136,6 @@ class Mode(enum.IntEnum): `here `__. NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. """ - MODE_UNSPECIFIED = 0 TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 @@ -158,7 +151,6 @@ class ReadConsistency(enum.IntEnum): STRONG (int): Strong consistency. EVENTUAL (int): Eventual consistency. """ - READ_CONSISTENCY_UNSPECIFIED = 0 STRONG = 1 EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 1b5639d2dd44..3f0e8ef0cd13 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -27,17 +27,17 @@ class DatastoreGrpcTransport(object): which can be used to take advantage of advanced features of gRPC. 
""" - # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', ) - def __init__( - self, channel=None, credentials=None, address="datastore.googleapis.com:443" - ): + def __init__(self, + channel=None, + credentials=None, + address='datastore.googleapis.com:443'): """Instantiate the transport class. Args: @@ -55,21 +55,28 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) + 'The `channel` and `credentials` arguments are mutually ' + 'exclusive.', ) # Create the channel. if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + ) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = {"datastore_stub": datastore_pb2_grpc.DatastoreStub(channel)} + self._stubs = { + 'datastore_stub': datastore_pb2_grpc.DatastoreStub(channel), + } @classmethod - def create_channel(cls, address="datastore.googleapis.com:443", credentials=None): + def create_channel(cls, + address='datastore.googleapis.com:443', + credentials=None): """Create and return a gRPC channel object. Args: @@ -84,7 +91,9 @@ def create_channel(cls, address="datastore.googleapis.com:443", credentials=None grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, + credentials=credentials, + scopes=cls._OAUTH_SCOPES, ) @property @@ -107,7 +116,7 @@ def lookup(self): deserialized request object and returns a deserialized response object. """ - return self._stubs["datastore_stub"].Lookup + return self._stubs['datastore_stub'].Lookup @property def run_query(self): @@ -120,7 +129,7 @@ def run_query(self): deserialized request object and returns a deserialized response object. """ - return self._stubs["datastore_stub"].RunQuery + return self._stubs['datastore_stub'].RunQuery @property def begin_transaction(self): @@ -133,7 +142,7 @@ def begin_transaction(self): deserialized request object and returns a deserialized response object. """ - return self._stubs["datastore_stub"].BeginTransaction + return self._stubs['datastore_stub'].BeginTransaction @property def commit(self): @@ -147,7 +156,7 @@ def commit(self): deserialized request object and returns a deserialized response object. """ - return self._stubs["datastore_stub"].Commit + return self._stubs['datastore_stub'].Commit @property def rollback(self): @@ -160,7 +169,7 @@ def rollback(self): deserialized request object and returns a deserialized response object. """ - return self._stubs["datastore_stub"].Rollback + return self._stubs['datastore_stub'].Rollback @property def allocate_ids(self): @@ -174,7 +183,7 @@ def allocate_ids(self): deserialized request object and returns a deserialized response object. """ - return self._stubs["datastore_stub"].AllocateIds + return self._stubs['datastore_stub'].AllocateIds @property def reserve_ids(self): @@ -188,4 +197,4 @@ def reserve_ids(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs["datastore_stub"].ReserveIds + return self._stubs['datastore_stub'].ReserveIds From a10f6d7fb1c3458b4df61fd82eb89d87ea5b73b1 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 13:13:54 -0800 Subject: [PATCH 199/611] Run black at end of synth.py (#6698) * Run black at end of synth.py * blacken logging --- packages/google-cloud-datastore/synth.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index efd03dc21547..cd9c11e2bc3d 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -37,3 +37,5 @@ # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=100) s.move(templated_files) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) From b345329611a50d18d920f7286988688fb0b23054 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 13:23:53 -0800 Subject: [PATCH 200/611] omit local deps (#6701) --- packages/google-cloud-datastore/.coveragerc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index 51fec440cebf..6b9ab9da4a1b 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -14,5 +14,5 @@ exclude_lines = omit = */gapic/*.py */proto/*.py - */google-cloud-python/core/*.py + */core/*.py */site-packages/*.py \ No newline at end of file From 3cf88ceb1f9d0ce3e5cb1c4fd2cbf7a8b581ce1a Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 30 Nov 2018 15:25:18 -0800 Subject: [PATCH 201/611] blacken all gen'd libs (#6792) * blacken all gen'd libs --- .../datastore_v1/gapic/datastore_client.py | 346 ++- .../gapic/datastore_client_config.py | 22 +- .../google/cloud/datastore_v1/gapic/enums.py | 8 + .../transports/datastore_grpc_transport.py | 47 +- .../cloud/datastore_v1/proto/datastore_pb2.py | 2753 ++++++++++------- .../datastore_v1/proto/datastore_pb2_grpc.py | 219 +- .../cloud/datastore_v1/proto/entity_pb2.py | 1205 +++++--- .../datastore_v1/proto/entity_pb2_grpc.py | 1 - .../cloud/datastore_v1/proto/query_pb2.py | 2113 ++++++++----- .../datastore_v1/proto/query_pb2_grpc.py | 1 - 10 files changed, 4062 insertions(+), 2653 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index d1ebd3c925bf..cd1ba4582a63 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -35,7 +35,8 @@ from google.cloud.datastore_v1.proto import query_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-datastore', ).version + "google-cloud-datastore" +).version class DatastoreClient(object): @@ -48,12 +49,12 @@ class DatastoreClient(object): the request. """ - SERVICE_ADDRESS = 'datastore.googleapis.com:443' + SERVICE_ADDRESS = "datastore.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = 'google.datastore.v1.Datastore' + _INTERFACE_NAME = "google.datastore.v1.Datastore" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -69,19 +70,20 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: DatastoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -115,18 +117,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = datastore_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -135,25 +138,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=datastore_grpc_transport. - DatastoreGrpcTransport, + default_class=datastore_grpc_transport.DatastoreGrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) self.transport = transport else: self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -163,7 +165,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. # These are the actual callables which invoke the proper @@ -172,13 +175,15 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def lookup(self, - project_id, - keys, - read_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def lookup( + self, + project_id, + keys, + read_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Looks up entities by key. 
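`from_service_account_file` (also exposed as `from_service_account_json`) builds the credentials and forwards everything else to the constructor. A sketch, with a placeholder key-file path:

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient.from_service_account_file(
        "service-account.json"
    )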
@@ -225,32 +230,34 @@ def lookup(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'lookup' not in self._inner_api_calls: + if "lookup" not in self._inner_api_calls: self._inner_api_calls[ - 'lookup'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.lookup, - default_retry=self._method_configs['Lookup'].retry, - default_timeout=self._method_configs['Lookup'].timeout, - client_info=self._client_info, - ) + "lookup" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.lookup, + default_retry=self._method_configs["Lookup"].retry, + default_timeout=self._method_configs["Lookup"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.LookupRequest( - project_id=project_id, - keys=keys, - read_options=read_options, + project_id=project_id, keys=keys, read_options=read_options ) - return self._inner_api_calls['lookup']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def run_query(self, - project_id, - partition_id, - read_options=None, - query=None, - gql_query=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["lookup"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def run_query( + self, + project_id, + partition_id, + read_options=None, + query=None, + gql_query=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Queries for entities. @@ -308,21 +315,19 @@ def run_query(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'run_query' not in self._inner_api_calls: + if "run_query" not in self._inner_api_calls: self._inner_api_calls[ - 'run_query'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_query, - default_retry=self._method_configs['RunQuery'].retry, - default_timeout=self._method_configs['RunQuery'].timeout, - client_info=self._client_info, - ) + "run_query" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_query, + default_retry=self._method_configs["RunQuery"].retry, + default_timeout=self._method_configs["RunQuery"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - query=query, - gql_query=gql_query, - ) + google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query) request = datastore_pb2.RunQueryRequest( project_id=project_id, @@ -331,15 +336,18 @@ def run_query(self, query=query, gql_query=gql_query, ) - return self._inner_api_calls['run_query']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def begin_transaction(self, - project_id, - transaction_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["run_query"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def begin_transaction( + self, + project_id, + transaction_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Begins a new transaction. @@ -379,32 +387,33 @@ def begin_transaction(self, ValueError: If the parameters are invalid. 
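`begin_transaction` optionally takes `TransactionOptions`; the test_transaction.py changes earlier in this series import the same type from `google.cloud.datastore_v1.types` to exercise the read-only variant. A sketch, with a placeholder project id:

    from google.cloud.datastore_v1.types import TransactionOptions

    options = TransactionOptions(read_only=TransactionOptions.ReadOnly())
    txn = client.begin_transaction(
        "my-project", transaction_options=options
    ).transaction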
""" # Wrap the transport method to add retry and timeout logic. - if 'begin_transaction' not in self._inner_api_calls: + if "begin_transaction" not in self._inner_api_calls: self._inner_api_calls[ - 'begin_transaction'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.begin_transaction, - default_retry=self._method_configs['BeginTransaction']. - retry, - default_timeout=self._method_configs['BeginTransaction']. - timeout, - client_info=self._client_info, - ) + "begin_transaction" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.begin_transaction, + default_retry=self._method_configs["BeginTransaction"].retry, + default_timeout=self._method_configs["BeginTransaction"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, - transaction_options=transaction_options, + project_id=project_id, transaction_options=transaction_options ) - return self._inner_api_calls['begin_transaction']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def commit(self, - project_id, - mode, - mutations, - transaction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["begin_transaction"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def commit( + self, + project_id, + mode, + mutations, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Commits a transaction, optionally creating, deleting or modifying some entities. @@ -468,18 +477,19 @@ def commit(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'commit' not in self._inner_api_calls: + if "commit" not in self._inner_api_calls: self._inner_api_calls[ - 'commit'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.commit, - default_retry=self._method_configs['Commit'].retry, - default_timeout=self._method_configs['Commit'].timeout, - client_info=self._client_info, - ) + "commit" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.commit, + default_retry=self._method_configs["Commit"].retry, + default_timeout=self._method_configs["Commit"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) + google.api_core.protobuf_helpers.check_oneof(transaction=transaction) request = datastore_pb2.CommitRequest( project_id=project_id, @@ -487,15 +497,18 @@ def commit(self, mutations=mutations, transaction=transaction, ) - return self._inner_api_calls['commit']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def rollback(self, - project_id, - transaction, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["commit"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def rollback( + self, + project_id, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Rolls back a transaction. @@ -536,28 +549,31 @@ def rollback(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'rollback' not in self._inner_api_calls: + if "rollback" not in self._inner_api_calls: self._inner_api_calls[ - 'rollback'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.rollback, - default_retry=self._method_configs['Rollback'].retry, - default_timeout=self._method_configs['Rollback'].timeout, - client_info=self._client_info, - ) + "rollback" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.rollback, + default_retry=self._method_configs["Rollback"].retry, + default_timeout=self._method_configs["Rollback"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.RollbackRequest( - project_id=project_id, - transaction=transaction, + project_id=project_id, transaction=transaction ) - return self._inner_api_calls['rollback']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def allocate_ids(self, - project_id, - keys, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["rollback"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def allocate_ids( + self, + project_id, + keys, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -602,30 +618,30 @@ def allocate_ids(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'allocate_ids' not in self._inner_api_calls: + if "allocate_ids" not in self._inner_api_calls: self._inner_api_calls[ - 'allocate_ids'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.allocate_ids, - default_retry=self._method_configs['AllocateIds'].retry, - default_timeout=self._method_configs['AllocateIds']. - timeout, - client_info=self._client_info, - ) + "allocate_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.allocate_ids, + default_retry=self._method_configs["AllocateIds"].retry, + default_timeout=self._method_configs["AllocateIds"].timeout, + client_info=self._client_info, + ) - request = datastore_pb2.AllocateIdsRequest( - project_id=project_id, - keys=keys, + request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys) + return self._inner_api_calls["allocate_ids"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['allocate_ids']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def reserve_ids(self, - project_id, - keys, - database_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def reserve_ids( + self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -671,19 +687,19 @@ def reserve_ids(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'reserve_ids' not in self._inner_api_calls: + if "reserve_ids" not in self._inner_api_calls: self._inner_api_calls[ - 'reserve_ids'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reserve_ids, - default_retry=self._method_configs['ReserveIds'].retry, - default_timeout=self._method_configs['ReserveIds'].timeout, - client_info=self._client_info, - ) + "reserve_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.reserve_ids, + default_retry=self._method_configs["ReserveIds"].retry, + default_timeout=self._method_configs["ReserveIds"].timeout, + client_info=self._client_info, + ) request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, - keys=keys, - database_id=database_id, + project_id=project_id, keys=keys, database_id=database_id + ) + return self._inner_api_calls["reserve_ids"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['reserve_ids']( - request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index 1a3eb9523447..95822b8babfc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -3,7 +3,7 @@ "google.datastore.v1.Datastore": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -13,46 +13,46 @@ "initial_rpc_timeout_millis": 60000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, } }, "methods": { "Lookup": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "RunQuery": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "BeginTransaction": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Commit": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Rollback": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "AllocateIds": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ReserveIds": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 41497f71781b..78de5345e377 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -28,6 +28,7 @@ class NullValue(enum.IntEnum): Attributes: NULL_VALUE (int): Null value. """ + NULL_VALUE = 0 @@ -47,6 +48,7 @@ class ResultType(enum.IntEnum): PROJECTION (int): A projected subset of properties. The entity may have no key. KEY_ONLY (int): Only the key. 
""" + RESULT_TYPE_UNSPECIFIED = 0 FULL = 1 PROJECTION = 2 @@ -63,6 +65,7 @@ class Direction(enum.IntEnum): ASCENDING (int): Ascending. DESCENDING (int): Descending. """ + DIRECTION_UNSPECIFIED = 0 ASCENDING = 1 DESCENDING = 2 @@ -77,6 +80,7 @@ class Operator(enum.IntEnum): OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. AND (int): The results are required to satisfy each of the combined filters. """ + OPERATOR_UNSPECIFIED = 0 AND = 1 @@ -95,6 +99,7 @@ class Operator(enum.IntEnum): EQUAL (int): Equal. HAS_ANCESTOR (int): Has ancestor. """ + OPERATOR_UNSPECIFIED = 0 LESS_THAN = 1 LESS_THAN_OR_EQUAL = 2 @@ -117,6 +122,7 @@ class MoreResultsType(enum.IntEnum): cursor. NO_MORE_RESULTS (int): The query is finished, and there are no more results. """ + MORE_RESULTS_TYPE_UNSPECIFIED = 0 NOT_FINISHED = 1 MORE_RESULTS_AFTER_LIMIT = 2 @@ -136,6 +142,7 @@ class Mode(enum.IntEnum): `here `__. NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. """ + MODE_UNSPECIFIED = 0 TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 @@ -151,6 +158,7 @@ class ReadConsistency(enum.IntEnum): STRONG (int): Strong consistency. EVENTUAL (int): Eventual consistency. """ + READ_CONSISTENCY_UNSPECIFIED = 0 STRONG = 1 EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 3f0e8ef0cd13..1b5639d2dd44 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -27,17 +27,17 @@ class DatastoreGrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", ) - def __init__(self, - channel=None, - credentials=None, - address='datastore.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="datastore.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -55,28 +55,21 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = { - 'datastore_stub': datastore_pb2_grpc.DatastoreStub(channel), - } + self._stubs = {"datastore_stub": datastore_pb2_grpc.DatastoreStub(channel)} @classmethod - def create_channel(cls, - address='datastore.googleapis.com:443', - credentials=None): + def create_channel(cls, address="datastore.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. 
Args: @@ -91,9 +84,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -116,7 +107,7 @@ def lookup(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].Lookup + return self._stubs["datastore_stub"].Lookup @property def run_query(self): @@ -129,7 +120,7 @@ def run_query(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].RunQuery + return self._stubs["datastore_stub"].RunQuery @property def begin_transaction(self): @@ -142,7 +133,7 @@ def begin_transaction(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].BeginTransaction + return self._stubs["datastore_stub"].BeginTransaction @property def commit(self): @@ -156,7 +147,7 @@ def commit(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].Commit + return self._stubs["datastore_stub"].Commit @property def rollback(self): @@ -169,7 +160,7 @@ def rollback(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].Rollback + return self._stubs["datastore_stub"].Rollback @property def allocate_ids(self): @@ -183,7 +174,7 @@ def allocate_ids(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['datastore_stub'].AllocateIds + return self._stubs["datastore_stub"].AllocateIds @property def reserve_ids(self): @@ -197,4 +188,4 @@ def reserve_ids(self): deserialized request object and returns a deserialized response object. 
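        Example (editor's sketch; assumes application default credentials are
        available and that ``datastore_pb2`` from this package's ``proto``
        subpackage has been imported; the project ID is a placeholder):
            >>> transport = DatastoreGrpcTransport()
            >>> request = datastore_pb2.ReserveIdsRequest(
            ...     project_id='[PROJECT_ID]', keys=[])
            >>> response = transport.reserve_ids(request)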
""" - return self._stubs['datastore_stub'].ReserveIds + return self._stubs["datastore_stub"].ReserveIds diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index 2cfcf62435f6..340a0224ce83 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -2,974 +2,1498 @@ # source: google/cloud/datastore_v1/proto/datastore.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.datastore_v1.proto import entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2 -from google.cloud.datastore_v1.proto import query_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_query__pb2 +from google.cloud.datastore_v1.proto import ( + entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2, +) +from google.cloud.datastore_v1.proto import ( + query_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_query__pb2, +) DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/datastore_v1/proto/datastore.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n/google/cloud/datastore_v1/proto/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/datastore_v1/proto/entity.proto\x1a+google/cloud/datastore_v1/proto/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"s\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x44\n\x13transaction_options\x18\n \x01(\x0b\x32\'.google.datastore.v1.TransactionOptions\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 
\x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"d\n\x11ReserveIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0b\x64\x61tabase_id\x18\t \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x14\n\x12ReserveIdsResponse\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type\"\xe3\x01\n\x12TransactionOptions\x12G\n\nread_write\x18\x01 \x01(\x0b\x32\x31.google.datastore.v1.TransactionOptions.ReadWriteH\x00\x12\x45\n\tread_only\x18\x02 \x01(\x0b\x32\x30.google.datastore.v1.TransactionOptions.ReadOnlyH\x00\x1a)\n\tReadWrite\x12\x1c\n\x14previous_transaction\x18\x01 \x01(\x0c\x1a\n\n\x08ReadOnlyB\x06\n\x04mode2\xec\x07\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" 
/v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*\x12\x8e\x01\n\nReserveIds\x12&.google.datastore.v1.ReserveIdsRequest\x1a\'.google.datastore.v1.ReserveIdsResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/projects/{project_id}:reserveIds:\x01*B\xa1\x01\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 
\x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z= 0 if specified. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) + ), +) _sym_db.RegisterMessage(Query) -KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( - DESCRIPTOR = _KINDEXPRESSION, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A representation of a kind. +KindExpression = _reflection.GeneratedProtocolMessageType( + "KindExpression", + (_message.Message,), + dict( + DESCRIPTOR=_KINDEXPRESSION, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A representation of a kind. Attributes: name: The name of the kind. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) + ), +) _sym_db.RegisterMessage(KindExpression) -PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYREFERENCE, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A reference to a property relative to the kind expressions. +PropertyReference = _reflection.GeneratedProtocolMessageType( + "PropertyReference", + (_message.Message,), + dict( + DESCRIPTOR=_PROPERTYREFERENCE, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A reference to a property relative to the kind expressions. Attributes: @@ -936,30 +1359,36 @@ The name of the property. If name includes "."s, it may be interpreted as a property name path. 
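    For example (editor's illustrative sketch; the dotted name demonstrates
    the path interpretation described above):
        >>> from google.cloud.datastore_v1.proto import query_pb2
        >>> ref = query_pb2.PropertyReference(name='address.city')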
""", - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) + ), +) _sym_db.RegisterMessage(PropertyReference) -Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( - DESCRIPTOR = _PROJECTION, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A representation of a property in a projection. +Projection = _reflection.GeneratedProtocolMessageType( + "Projection", + (_message.Message,), + dict( + DESCRIPTOR=_PROJECTION, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A representation of a property in a projection. Attributes: property: The property to project. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) + ), +) _sym_db.RegisterMessage(Projection) -PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYORDER, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """The desired order for a specific property. +PropertyOrder = _reflection.GeneratedProtocolMessageType( + "PropertyOrder", + (_message.Message,), + dict( + DESCRIPTOR=_PROPERTYORDER, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""The desired order for a specific property. Attributes: @@ -968,15 +1397,18 @@ direction: The direction to order by. Defaults to ``ASCENDING``. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) + ), +) _sym_db.RegisterMessage(PropertyOrder) -Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( - DESCRIPTOR = _FILTER, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A holder for any type of filter. +Filter = _reflection.GeneratedProtocolMessageType( + "Filter", + (_message.Message,), + dict( + DESCRIPTOR=_FILTER, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A holder for any type of filter. Attributes: @@ -987,15 +1419,18 @@ property_filter: A filter on a property. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) + ), +) _sym_db.RegisterMessage(Filter) -CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( - DESCRIPTOR = _COMPOSITEFILTER, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A filter that merges multiple other filters using the given operator. +CompositeFilter = _reflection.GeneratedProtocolMessageType( + "CompositeFilter", + (_message.Message,), + dict( + DESCRIPTOR=_COMPOSITEFILTER, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A filter that merges multiple other filters using the given operator. Attributes: @@ -1005,15 +1440,18 @@ The list of filters to combine. Must contain at least one filter. 
""", - # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) + ), +) _sym_db.RegisterMessage(CompositeFilter) -PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYFILTER, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A filter on a specific property. +PropertyFilter = _reflection.GeneratedProtocolMessageType( + "PropertyFilter", + (_message.Message,), + dict( + DESCRIPTOR=_PROPERTYFILTER, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A filter on a specific property. Attributes: @@ -1024,22 +1462,27 @@ value: The value to compare the property to. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) + ), +) _sym_db.RegisterMessage(PropertyFilter) -GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( - - NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( - DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) - )) - , - DESCRIPTOR = _GQLQUERY, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A `GQL +GqlQuery = _reflection.GeneratedProtocolMessageType( + "GqlQuery", + (_message.Message,), + dict( + NamedBindingsEntry=_reflection.GeneratedProtocolMessageType( + "NamedBindingsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_GQLQUERY_NAMEDBINDINGSENTRY, + __module__="google.cloud.datastore_v1.proto.query_pb2" + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) + ), + ), + DESCRIPTOR=_GQLQUERY, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A `GQL query `__. @@ -1065,16 +1508,19 @@ ``query_string``, there must be an i-th numbered parameter. The inverse must also be true. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) + ), +) _sym_db.RegisterMessage(GqlQuery) _sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) -GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( - DESCRIPTOR = _GQLQUERYPARAMETER, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A binding parameter for a GQL query. +GqlQueryParameter = _reflection.GeneratedProtocolMessageType( + "GqlQueryParameter", + (_message.Message,), + dict( + DESCRIPTOR=_GQLQUERYPARAMETER, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A binding parameter for a GQL query. Attributes: @@ -1086,15 +1532,18 @@ A query cursor. Query cursors are returned in query result batches. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) + ), +) _sym_db.RegisterMessage(GqlQueryParameter) -QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( - DESCRIPTOR = _QUERYRESULTBATCH, - __module__ = 'google.cloud.datastore_v1.proto.query_pb2' - , - __doc__ = """A batch of results produced by a query. 
+QueryResultBatch = _reflection.GeneratedProtocolMessageType( + "QueryResultBatch", + (_message.Message,), + dict( + DESCRIPTOR=_QUERYRESULTBATCH, + __module__="google.cloud.datastore_v1.proto.query_pb2", + __doc__="""A batch of results produced by a query. Attributes: @@ -1123,13 +1572,21 @@ preceding batches. The value will be zero for eventually consistent queries. """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) - )) + # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) + ), +) _sym_db.RegisterMessage(QueryResultBatch) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\nQueryProtoP\001Z Date: Fri, 30 Nov 2018 19:43:20 -0800 Subject: [PATCH 202/611] Update noxfile. --- packages/google-cloud-datastore/noxfile.py | 28 ++++++++++------------ 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index a9efc0e344ce..bfac9f4c2bce 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -23,40 +23,36 @@ LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) @nox.session(python="3.7") -def blacken(session): - """Run black. +def lint(session): + """Run linters. - Format code to uniform standard. + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. """ - session.install("black") + session.install("flake8", "black", *LOCAL_DEPS) session.run( "black", + "--check", "google", "tests", "docs", - "--exclude", - ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", ) + session.run("flake8", "google", "tests") -@nox.session(python="3.7") -def lint(session): - """Run linters. +@nox.session(python="3.6") +def blacken(session): + """Run black. - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. + Format code to uniform standard. 
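    (Editor's note, assuming the standard nox CLI: this session is invoked as
    ``nox -s blacken`` to rewrite files in place, while the ``lint`` session
    above runs the same formatter with ``--check`` so CI fails on unformatted
    code instead of silently rewriting it.)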
""" - session.install("flake8", "black", *LOCAL_DEPS) + session.install("black") session.run( "black", - "--check", "google", "tests", "docs", - "--exclude", - ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", ) - session.run("flake8", "google", "tests") @nox.session(python="3.7") From 2655c8613248ffb3e391d4ab02f533993610b978 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 3 Dec 2018 13:59:48 -0800 Subject: [PATCH 203/611] Use moved iam.policy now at google.api_core.iam.policy (#6741) * update references to iam to use api-core\ * Update dependency to api_core --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 7cf1a2491e6d..870117bd8a02 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', + 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', 'google-cloud-core >=0.28.0, <0.29dev', ] extras = { From e450fc6a505114e41ffa9a309af0f503643827aa Mon Sep 17 00:00:00 2001 From: Angela Li Date: Thu, 6 Dec 2018 11:04:53 -0800 Subject: [PATCH 204/611] Update version of google-cloud-core (#6858) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 870117bd8a02..ce0685e70925 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', - 'google-cloud-core >=0.28.0, <0.29dev', + 'google-cloud-core >=0.29.0, <0.30dev', ] extras = { } From ecf7a0707aa052863c5bb46e01c52a5640a62881 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 10 Dec 2018 13:47:45 -0800 Subject: [PATCH 205/611] Release datastore 1.7.2 (#6889) * Release 1.7.2 --- packages/google-cloud-datastore/CHANGELOG.md | 24 ++++++++++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 57b311b9af96..c86041e76a71 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.7.2 + +12-10-2018 12:37 PST + + +### Implementation Changes +- Fix client_info bug, update docstrings. ([#6409](https://github.com/googleapis/google-cloud-python/pull/6409)) +- Pick up fixes in GAPIC generator. ([#6494](https://github.com/googleapis/google-cloud-python/pull/6494)) +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Pick up enum fixes in the GAPIC generator. ([#6610](https://github.com/googleapis/google-cloud-python/pull/6610)) + +### Dependencies +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) +- Update version of google-cloud-core ([#6858](https://github.com/googleapis/google-cloud-python/pull/6858)) + +### Internal / Testing Changes +- Update noxfile. +- Add synth metadata. 
([#6564](https://github.com/googleapis/google-cloud-python/pull/6564)) +- blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) + ## 1.7.1 10-29-2018 10:38 PDT diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index ce0685e70925..6dffb2115b48 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-datastore' description = 'Google Cloud Datastore API client library' -version = '1.7.1' +version = '1.7.2' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 43a338986e73bd62f5b23ab1b88b8f319cda1584 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 14 Dec 2018 12:25:37 -0800 Subject: [PATCH 206/611] Document Python 2 deprecation (#6910) --- packages/google-cloud-datastore/README.rst | 9 +++++++++ packages/google-cloud-datastore/setup.py | 2 ++ 2 files changed, 11 insertions(+) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 575585f756e9..dbf58b858160 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -51,6 +51,15 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.4 + +Deprecated Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. + + Mac/Linux ^^^^^^^^^ diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 6dffb2115b48..0cda00f825f4 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -76,6 +76,7 @@ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Operating System :: OS Independent', 'Topic :: Internet', ], @@ -84,6 +85,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, + python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', include_package_data=True, zip_safe=False, ) From 002a362a0abc1f6a08fbce50f14b497ee77b83e0 Mon Sep 17 00:00:00 2001 From: Jim Morrison Date: Mon, 17 Dec 2018 09:09:39 -0800 Subject: [PATCH 207/611] Show use of 'batch.begin()' in docstring example. 
(#6932) --- packages/google-cloud-datastore/google/cloud/datastore/batch.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index eaa839f4e540..dc8463f3f973 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -37,6 +37,7 @@ class Batch(object): >>> from google.cloud import datastore >>> client = datastore.Client() >>> batch = client.batch() + >>> batch.begin() >>> batch.put(entity1) >>> batch.put(entity2) >>> batch.delete(key3) From 51c4989f816721cb419c9bbd76ea46df8519ccd7 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 18 Dec 2018 12:54:20 -0800 Subject: [PATCH 208/611] Release 1.7.3 (#6945) --- packages/google-cloud-datastore/CHANGELOG.md | 9 +++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index c86041e76a71..c7f1161078fb 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.7.3 + +12-17-2018 16:45 PST + + +### Documentation +- Show use of 'batch.begin()' in docstring example. ([#6932](https://github.com/googleapis/google-cloud-python/pull/6932)) +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) + ## 1.7.2 12-10-2018 12:37 PST diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 0cda00f825f4..eb51517ad5bc 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-datastore' description = 'Google Cloud Datastore API client library' -version = '1.7.2' +version = '1.7.3' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 17fbeba56bd4f8dee7f5f9d146c671de604d740f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 4 Jan 2019 10:09:16 -0800 Subject: [PATCH 209/611] Pick up stub docstring fix in GAPIC generator. (#6968) --- .../google/cloud/datastore_v1/gapic/enums.py | 96 +++++++++---------- .../transports/datastore_grpc_transport.py | 14 +-- .../google-cloud-datastore/synth.metadata | 36 ++++--- 3 files changed, 79 insertions(+), 67 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 78de5345e377..c3943f028159 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -32,6 +32,38 @@ class NullValue(enum.IntEnum): NULL_VALUE = 0 +class CommitRequest(object): + class Mode(enum.IntEnum): + """ + The modes available for commits. + + Attributes: + MODE_UNSPECIFIED (int): Unspecified. This value must not be used. + TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are + applied. Learn about transactions + `here `__. + NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. 
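        (Editor's illustrative sketch of reading one of these values; the
        import path matches this file's location in the package:)
            >>> from google.cloud.datastore_v1.gapic import enums
            >>> enums.CommitRequest.Mode.TRANSACTIONAL
            <Mode.TRANSACTIONAL: 1>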
+ """ + + MODE_UNSPECIFIED = 0 + TRANSACTIONAL = 1 + NON_TRANSACTIONAL = 2 + + +class CompositeFilter(object): + class Operator(enum.IntEnum): + """ + A composite filter operator. + + Attributes: + OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. + AND (int): The results are required to satisfy each of the combined filters. + """ + + OPERATOR_UNSPECIFIED = 0 + AND = 1 + + class EntityResult(object): class ResultType(enum.IntEnum): """ @@ -55,36 +87,6 @@ class ResultType(enum.IntEnum): KEY_ONLY = 3 -class PropertyOrder(object): - class Direction(enum.IntEnum): - """ - The sort direction. - - Attributes: - DIRECTION_UNSPECIFIED (int): Unspecified. This value must not be used. - ASCENDING (int): Ascending. - DESCENDING (int): Descending. - """ - - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - -class CompositeFilter(object): - class Operator(enum.IntEnum): - """ - A composite filter operator. - - Attributes: - OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. - AND (int): The results are required to satisfy each of the combined filters. - """ - - OPERATOR_UNSPECIFIED = 0 - AND = 1 - - class PropertyFilter(object): class Operator(enum.IntEnum): """ @@ -109,6 +111,22 @@ class Operator(enum.IntEnum): HAS_ANCESTOR = 11 +class PropertyOrder(object): + class Direction(enum.IntEnum): + """ + The sort direction. + + Attributes: + DIRECTION_UNSPECIFIED (int): Unspecified. This value must not be used. + ASCENDING (int): Ascending. + DESCENDING (int): Descending. + """ + + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + class QueryResultBatch(object): class MoreResultsType(enum.IntEnum): """ @@ -130,24 +148,6 @@ class MoreResultsType(enum.IntEnum): NO_MORE_RESULTS = 3 -class CommitRequest(object): - class Mode(enum.IntEnum): - """ - The modes available for commits. - - Attributes: - MODE_UNSPECIFIED (int): Unspecified. This value must not be used. - TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are - applied. Learn about transactions - `here `__. - NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. - """ - - MODE_UNSPECIFIED = 0 - TRANSACTIONAL = 1 - NON_TRANSACTIONAL = 2 - - class ReadOptions(object): class ReadConsistency(enum.IntEnum): """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 1b5639d2dd44..a99455df1dc2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -98,7 +98,7 @@ def channel(self): @property def lookup(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`DatastoreClient.lookup`. Looks up entities by key. @@ -111,7 +111,7 @@ def lookup(self): @property def run_query(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`DatastoreClient.run_query`. Queries for entities. @@ -124,7 +124,7 @@ def run_query(self): @property def begin_transaction(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`DatastoreClient.begin_transaction`. Begins a new transaction. @@ -137,7 +137,7 @@ def begin_transaction(self): @property def commit(self): - """Return the gRPC stub for {$apiMethod.name}. 
+ """Return the gRPC stub for :meth:`DatastoreClient.commit`. Commits a transaction, optionally creating, deleting or modifying some entities. @@ -151,7 +151,7 @@ def commit(self): @property def rollback(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`DatastoreClient.rollback`. Rolls back a transaction. @@ -164,7 +164,7 @@ def rollback(self): @property def allocate_ids(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`DatastoreClient.allocate_ids`. Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -178,7 +178,7 @@ def allocate_ids(self): @property def reserve_ids(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`DatastoreClient.reserve_ids`. Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index f26e69af8847..b4648f1b9500 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,38 @@ { + "updateTime": "2018-12-18T13:15:37.071207Z", "sources": [ + { + "generator": { + "name": "artman", + "version": "0.16.3", + "dockerImage": "googleapis/artman@sha256:bfb92654b4a77368471f70e2808eaf4e60f263b9559f27bb3284097322787bf1" + } + }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5a57f0c13a358b2b15452bf2d67453774a5f6d4f", - "internalRef": "221837528" + "sha": "c04bc0dc0a9164d924a9ab923fd6845b4ae6a7ab", + "internalRef": "225851467" } }, { - "git": { - "name": "googleapis-private", - "remote": "https://github.com/googleapis/googleapis-private.git", - "sha": "6aa8e1a447bb8d0367150356a28cb4d3f2332641", - "internalRef": "221340946" + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "2018.12.6" } - }, + } + ], + "destinations": [ { - "generator": { - "name": "artman", - "version": "0.16.0", - "dockerImage": "googleapis/artman@sha256:90f9d15e9bad675aeecd586725bce48f5667ffe7d5fc4d1e96d51ff34304815b" + "client": { + "source": "googleapis", + "apiName": "datastore", + "apiVersion": "v1", + "language": "python", + "generator": "gapic", + "config": "google/datastore/artman_datastore.yaml" } } ] From a3ff590439a712d960ff8845b38ac8116df819c4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 9 Jan 2019 11:06:19 -0800 Subject: [PATCH 210/611] Protoc-generated serialization update. 
(#7080) --- .../cloud/datastore_v1/proto/datastore_pb2.py | 203 ++++++++--------- .../cloud/datastore_v1/proto/entity_pb2.py | 81 ++++--- .../cloud/datastore_v1/proto/query_pb2.py | 209 ++++++++++-------- .../google-cloud-datastore/synth.metadata | 10 +- 4 files changed, 259 insertions(+), 244 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index 340a0224ce83..1d807b47c81a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -8,7 +8,6 @@ from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) @@ -28,6 +27,9 @@ name="google/cloud/datastore_v1/proto/datastore.proto", package="google.datastore.v1", syntax="proto3", + serialized_options=_b( + "\n\027com.google.datastore.v1B\016DatastoreProtoP\001Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z Date: Thu, 17 Jan 2019 
15:31:09 -0800 Subject: [PATCH 211/611] Update copyright headers --- .../cloud/datastore_v1/gapic/datastore_client.py | 2 +- .../google/cloud/datastore_v1/gapic/enums.py | 2 +- .../gapic/transports/datastore_grpc_transport.py | 2 +- packages/google-cloud-datastore/synth.metadata | 12 ++++++------ 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index cd1ba4582a63..c3d27b48777c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index c3943f028159..8f8b7367b19c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index a99455df1dc2..4563664ed6db 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index f4bbbbc9fd9e..c48ec8257292 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-01-09T13:17:07.364894Z", + "updateTime": "2019-01-17T13:16:51.374459Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.5", - "dockerImage": "googleapis/artman@sha256:5a96c2c5c6f9570cc9556b63dc9ce1838777fd9166b5b64e43ad8e0ecee2fe2c" + "version": "0.16.6", + "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "659d66ec24bf40b35a41a0b79218d96ba3add3d3", - "internalRef": "228437827" + "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05", + "internalRef": "229626798" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2018.12.6" + "version": "2019.1.16" } } ], From 648fa7cd4fafd1807f9168a6e0373baf59dfa644 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 29 Jan 2019 13:28:49 -0800 Subject: [PATCH 212/611] Add protos as an artifact to library (#7205) --- .../cloud/datastore_v1/proto/datastore.proto | 395 ++++++++++++++++++ .../datastore_v1/proto/datastore_admin.proto | 329 +++++++++++++++ .../cloud/datastore_v1/proto/entity.proto | 204 +++++++++ .../cloud/datastore_v1/proto/index.proto | 122 ++++++ .../cloud/datastore_v1/proto/query.proto | 310 ++++++++++++++ .../google-cloud-datastore/synth.metadata | 10 +- packages/google-cloud-datastore/synth.py | 1 + 7 files changed, 1366 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/index.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto new file mode 100644 index 000000000000..2a29a58cec72 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto @@ -0,0 +1,395 @@ +// Copyright 2018 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
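// (Editor's note: a hedged sketch of the REST mapping declared below. Every
// RPC in this service is bound to an HTTP POST; Lookup, for instance, maps to
//
//   POST https://datastore.googleapis.com/v1/projects/{project_id}:lookup
//   { "keys": [...], "readOptions": {...} }
//
// where the JSON body mirrors LookupRequest under the usual proto3 JSON
// field-name convention, and the host is the default endpoint used by the
// gRPC transport earlier in this patch series.)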
+ +syntax = "proto3"; + +package google.datastore.v1; + +import "google/api/annotations.proto"; +import "google/datastore/v1/entity.proto"; +import "google/datastore/v1/query.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/v1;datastore"; +option java_multiple_files = true; +option java_outer_classname = "DatastoreProto"; +option java_package = "com.google.datastore.v1"; +option php_namespace = "Google\\Cloud\\Datastore\\V1"; + + +// Each RPC normalizes the partition IDs of the keys in its input entities, +// and always returns entities with keys with normalized partition IDs. +// This applies to all keys and entities, including those in values, except keys +// with both an empty path and an empty or unset partition ID. Normalization of +// input keys sets the project ID (if not already set) to the project ID from +// the request. +// +service Datastore { + // Looks up entities by key. + rpc Lookup(LookupRequest) returns (LookupResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:lookup" + body: "*" + }; + } + + // Queries for entities. + rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:runQuery" + body: "*" + }; + } + + // Begins a new transaction. + rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:beginTransaction" + body: "*" + }; + } + + // Commits a transaction, optionally creating, deleting or modifying some + // entities. + rpc Commit(CommitRequest) returns (CommitResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:commit" + body: "*" + }; + } + + // Rolls back a transaction. + rpc Rollback(RollbackRequest) returns (RollbackResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:rollback" + body: "*" + }; + } + + // Allocates IDs for the given keys, which is useful for referencing an entity + // before it is inserted. + rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:allocateIds" + body: "*" + }; + } + + // Prevents the supplied keys' IDs from being auto-allocated by Cloud + // Datastore. + rpc ReserveIds(ReserveIdsRequest) returns (ReserveIdsResponse) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:reserveIds" + body: "*" + }; + } +} + +// The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. +message LookupRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // The options for this lookup request. + ReadOptions read_options = 1; + + // Keys of entities to look up. + repeated Key keys = 3; +} + +// The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. +message LookupResponse { + // Entities found as `ResultType.FULL` entities. The order of results in this + // field is undefined and has no relation to the order of the keys in the + // input. + repeated EntityResult found = 1; + + // Entities not found as `ResultType.KEY_ONLY` entities. The order of results + // in this field is undefined and has no relation to the order of the keys + // in the input. + repeated EntityResult missing = 2; + + // A list of keys that were not looked up due to resource constraints. 
The + // order of results in this field is undefined and has no relation to the + // order of the keys in the input. + repeated Key deferred = 3; +} + +// The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +message RunQueryRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // Entities are partitioned into subsets, identified by a partition ID. + // Queries are scoped to a single partition. + // This partition ID is normalized with the standard default context + // partition ID. + PartitionId partition_id = 2; + + // The options for this query. + ReadOptions read_options = 1; + + // The type of query. + oneof query_type { + // The query to run. + Query query = 3; + + // The GQL query to run. + GqlQuery gql_query = 7; + } +} + +// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +message RunQueryResponse { + // A batch of query results (always present). + QueryResultBatch batch = 1; + + // The parsed form of the `GqlQuery` from the request, if it was set. + Query query = 2; +} + +// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +message BeginTransactionRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // Options for a new transaction. + TransactionOptions transaction_options = 10; +} + +// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +message BeginTransactionResponse { + // The transaction identifier (always present). + bytes transaction = 1; +} + +// The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +message RollbackRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // The transaction identifier, returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + bytes transaction = 1; +} + +// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +// (an empty message). +message RollbackResponse { + +} + +// The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. +message CommitRequest { + // The modes available for commits. + enum Mode { + // Unspecified. This value must not be used. + MODE_UNSPECIFIED = 0; + + // Transactional: The mutations are either all applied, or none are applied. + // Learn about transactions [here](https://cloud.google.com/datastore/docs/concepts/transactions). + TRANSACTIONAL = 1; + + // Non-transactional: The mutations may not apply as all or none. + NON_TRANSACTIONAL = 2; + } + + // The ID of the project against which to make the request. + string project_id = 8; + + // The type of commit to perform. Defaults to `TRANSACTIONAL`. + Mode mode = 5; + + // Must be set when mode is `TRANSACTIONAL`. + oneof transaction_selector { + // The identifier of the transaction associated with the commit. A + // transaction identifier is returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + bytes transaction = 1; + } + + // The mutations to perform. + // + // When mode is `TRANSACTIONAL`, mutations affecting a single entity are + // applied in order. 
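
In the Python client the two commit modes correspond, roughly, to Transaction (TRANSACTIONAL) and Batch (NON_TRANSACTIONAL). A sketch under those assumptions — everything inside the transaction block is applied atomically or not at all:

    from google.cloud import datastore

    client = datastore.Client(project="my-project")

    # TRANSACTIONAL commit: both mutations apply together or roll back together.
    with client.transaction():
        task = datastore.Entity(client.key("Task", 1))
        task["done"] = True
        client.put(task)                      # an upsert mutation
        client.delete(client.key("Task", 2))  # a delete mutation

    # NON_TRANSACTIONAL commit: a plain batch; mutations may apply independently.
    with client.batch():
        client.put(datastore.Entity(client.key("Task", 3)))
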
The following sequences of mutations affecting a single + // entity are not permitted in a single `Commit` request: + // + // - `insert` followed by `insert` + // - `update` followed by `insert` + // - `upsert` followed by `insert` + // - `delete` followed by `update` + // + // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single + // entity. + repeated Mutation mutations = 6; +} + +// The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. +message CommitResponse { + // The result of performing the mutations. + // The i-th mutation result corresponds to the i-th mutation in the request. + repeated MutationResult mutation_results = 3; + + // The number of index entries updated during the commit, or zero if none were + // updated. + int32 index_updates = 4; +} + +// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +message AllocateIdsRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // A list of keys with incomplete key paths for which to allocate IDs. + // No key may be reserved/read-only. + repeated Key keys = 1; +} + +// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +message AllocateIdsResponse { + // The keys specified in the request (in the same order), each with + // its key path completed with a newly allocated ID. + repeated Key keys = 1; +} + +// The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +message ReserveIdsRequest { + // The ID of the project against which to make the request. + string project_id = 8; + + // If not empty, the ID of the database against which to make the request. + string database_id = 9; + + // A list of keys with complete key paths whose numeric IDs should not be + // auto-allocated. + repeated Key keys = 1; +} + +// The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +message ReserveIdsResponse { + +} + +// A mutation to apply to an entity. +message Mutation { + // The mutation operation. + // + // For `insert`, `update`, and `upsert`: + // - The entity's key must not be reserved/read-only. + // - No property in the entity may have a reserved name, + // not even a property in an entity in a value. + // - No value in the entity may have meaning 18, + // not even a value in an entity in another value. + oneof operation { + // The entity to insert. The entity must not already exist. + // The entity key's final path element may be incomplete. + Entity insert = 4; + + // The entity to update. The entity must already exist. + // Must have a complete key path. + Entity update = 5; + + // The entity to upsert. The entity may or may not already exist. + // The entity key's final path element may be incomplete. + Entity upsert = 6; + + // The key of the entity to delete. The entity may or may not already exist. + // Must have a complete key path and must not be reserved/read-only. + Key delete = 7; + } + + // When set, the server will detect whether or not this mutation conflicts + // with the current version of the entity on the server. Conflicting mutations + // are not applied, and are marked as such in MutationResult. + oneof conflict_detection_strategy { + // The version of the entity that this mutation is being applied to. If this + // does not match the current version on the server, the mutation conflicts. + int64 base_version = 8; + } +} + +// The result of applying a mutation. 
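
The AllocateIds and ReserveIds RPCs are wrapped by the client as allocate_ids and reserve_ids. A sketch — note that in later releases reserve_ids was renamed/split, so treat the second call as version-dependent:

    from google.cloud import datastore

    client = datastore.Client(project="my-project")

    # AllocateIds: complete ten partial key paths before inserting anything.
    incomplete = client.key("Task")  # no ID or name, so the path is incomplete
    allocated = client.allocate_ids(incomplete, 10)
    print([key.id for key in allocated])

    # ReserveIds: prevent these numeric IDs from ever being auto-allocated.
    client.reserve_ids(client.key("Task", 5000), 10)
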
+message MutationResult { + // The automatically allocated key. + // Set only when the mutation allocated a key. + Key key = 3; + + // The version of the entity on the server after processing the mutation. If + // the mutation doesn't change anything on the server, then the version will + // be the version of the current entity or, if no entity is present, a version + // that is strictly greater than the version of any previous entity and less + // than the version of any possible future entity. + int64 version = 4; + + // Whether a conflict was detected for this mutation. Always false when a + // conflict detection strategy field is not set in the mutation. + bool conflict_detected = 5; +} + +// The options shared by read requests. +message ReadOptions { + // The possible values for read consistencies. + enum ReadConsistency { + // Unspecified. This value must not be used. + READ_CONSISTENCY_UNSPECIFIED = 0; + + // Strong consistency. + STRONG = 1; + + // Eventual consistency. + EVENTUAL = 2; + } + + // If not specified, lookups and ancestor queries default to + // `read_consistency`=`STRONG`, global queries default to + // `read_consistency`=`EVENTUAL`. + oneof consistency_type { + // The non-transactional read consistency to use. + // Cannot be set to `STRONG` for global queries. + ReadConsistency read_consistency = 1; + + // The identifier of the transaction in which to read. A + // transaction identifier is returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + bytes transaction = 2; + } +} + +// Options for beginning a new transaction. +// +// Transactions can be created explicitly with calls to +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] or implicitly by setting +// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] in read requests. +message TransactionOptions { + // Options specific to read / write transactions. + message ReadWrite { + // The transaction identifier of the transaction being retried. + bytes previous_transaction = 1; + } + + // Options specific to read-only transactions. + message ReadOnly { + + } + + // The `mode` of the transaction, indicating whether write operations are + // supported. + oneof mode { + // The transaction should allow both reads and writes. + ReadWrite read_write = 1; + + // The transaction should only allow reads. + ReadOnly read_only = 2; + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto new file mode 100644 index 000000000000..c730de79c0f3 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto @@ -0,0 +1,329 @@ +// Copyright 2018 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
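
The ReadOptions consistency choices above map onto the client's eventual flag: lookups and ancestor queries default to STRONG, and passing eventual=True opts into EVENTUAL where allowed. A minimal sketch:

    from google.cloud import datastore

    client = datastore.Client(project="my-project")

    # Lookup with eventual consistency instead of the STRONG default.
    entity = client.get(client.key("Task", 1234), eventual=True)

    # Queries accept the same flag at fetch time (global queries already
    # default to EVENTUAL on the server side).
    query = client.query(kind="Task")
    results = list(query.fetch(limit=5, eventual=True))
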
+ +syntax = "proto3"; + +package google.datastore.admin.v1beta1; + +import "google/api/annotations.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.Admin.V1Beta1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1beta1;admin"; +option java_multiple_files = true; +option java_outer_classname = "DatastoreAdminProto"; +option java_package = "com.google.datastore.admin.v1beta1"; + + +// Google Cloud Datastore Admin API +// +// The Datastore Admin API provides several admin services for Cloud Datastore. +// +// ----------------------------------------------------------------------------- +// ## Concepts +// +// Project, namespace, kind, and entity as defined in the Google Cloud Datastore +// API. +// +// Operation: An Operation represents work being performed in the background. +// +// EntityFilter: Allows specifying a subset of entities in a project. This is +// specified as a combination of kinds and namespaces (either or both of which +// may be all). +// +// ----------------------------------------------------------------------------- +// ## Services +// +// # Export/Import +// +// The Export/Import service provides the ability to copy all or a subset of +// entities to/from Google Cloud Storage. +// +// Exported data may be imported into Cloud Datastore for any Google Cloud +// Platform project. It is not restricted to the export source project. It is +// possible to export from one project and then import into another. +// +// Exported data can also be loaded into Google BigQuery for analysis. +// +// Exports and imports are performed asynchronously. An Operation resource is +// created for each export/import. The state (including any errors encountered) +// of the export/import may be queried via the Operation resource. +// +// # Operation +// +// The Operations collection provides a record of actions performed for the +// specified project (including any operations in progress). Operations are not +// created directly but through calls on other collections or resources. +// +// An operation that is not yet done may be cancelled. The request to cancel is +// asynchronous and the operation may continue to run for some time after the +// request to cancel is made. +// +// An operation that is done may be deleted so that it is no longer listed as +// part of the Operation collection. +// +// ListOperations returns all pending operations, but not completed operations. +// +// Operations are created by service DatastoreAdmin, +// but are accessed via service google.longrunning.Operations. +service DatastoreAdmin { + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + rpc ExportEntities(ExportEntitiesRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1beta1/projects/{project_id}:export" + body: "*" + }; + } + + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. 
The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. + rpc ImportEntities(ImportEntitiesRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1beta1/projects/{project_id}:import" + body: "*" + }; + } +} + +// Metadata common to all Datastore Admin operations. +message CommonMetadata { + // The various possible states for an ongoing Operation. + enum State { + // Unspecified. + STATE_UNSPECIFIED = 0; + + // Request is being prepared for processing. + INITIALIZING = 1; + + // Request is actively being processed. + PROCESSING = 2; + + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + CANCELLING = 3; + + // Request has been processed and is in its finalization stage. + FINALIZING = 4; + + // Request has completed successfully. + SUCCESSFUL = 5; + + // Request has finished being processed, but encountered an error. + FAILED = 6; + + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. + CANCELLED = 7; + } + + // The time that work began on the operation. + google.protobuf.Timestamp start_time = 1; + + // The time the operation ended, either successfully or otherwise. + google.protobuf.Timestamp end_time = 2; + + // The type of the operation. Can be used as a filter in + // ListOperationsRequest. + OperationType operation_type = 3; + + // The client-assigned labels which were provided when the operation was + // created. May also include additional labels. + map labels = 4; + + // The current state of the Operation. + State state = 5; +} + +// Measures the progress of a particular metric. +message Progress { + // The amount of work that has been completed. Note that this may be greater + // than work_estimated. + int64 work_completed = 1; + + // An estimate of how much work needs to be performed. May be zero if the + // work estimate is unavailable. + int64 work_estimated = 2; +} + +// The request for +// [google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities]. +message ExportEntitiesRequest { + // Project ID against which to make the request. + string project_id = 1; + + // Client-assigned labels. + map labels = 2; + + // Description of what data from the project is included in the export. + EntityFilter entity_filter = 3; + + // Location for the export metadata and data files. + // + // The full resource URL of the external storage location. Currently, only + // Google Cloud Storage is supported. So output_url_prefix should be of the + // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the + // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud + // Storage namespace path (this is not a Cloud Datastore namespace). For more + // information about Cloud Storage namespace paths, see + // [Object name + // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). + // + // The resulting files will be nested deeper than the specified URL prefix. + // The final output URL will be provided in the + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url] + // field. 
That value should be used for subsequent ImportEntities operations. + // + // By nesting the data files deeper, the same Cloud Storage bucket can be used + // in multiple ExportEntities operations without conflict. + string output_url_prefix = 4; +} + +// The request for +// [google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities]. +message ImportEntitiesRequest { + // Project ID against which to make the request. + string project_id = 1; + + // Client-assigned labels. + map labels = 2; + + // The full resource URL of the external storage location. Currently, only + // Google Cloud Storage is supported. So input_url should be of the form: + // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where + // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is + // an optional Cloud Storage namespace path (this is not a Cloud Datastore + // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written + // by the ExportEntities operation. For more information about Cloud Storage + // namespace paths, see + // [Object name + // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). + // + // For more information, see + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url]. + string input_url = 3; + + // Optionally specify which kinds/namespaces are to be imported. If provided, + // the list must be a subset of the EntityFilter used in creating the export, + // otherwise a FAILED_PRECONDITION error will be returned. If no filter is + // specified then all entities from the export are imported. + EntityFilter entity_filter = 4; +} + +// The response for +// [google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities]. +message ExportEntitiesResponse { + // Location of the output metadata file. This can be used to begin an import + // into Cloud Datastore (this project or another project). See + // [google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url][google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url]. + // Only present if the operation completed successfully. + string output_url = 1; +} + +// Metadata for ExportEntities operations. +message ExportEntitiesMetadata { + // Metadata common to all Datastore Admin operations. + CommonMetadata common = 1; + + // An estimate of the number of entities processed. + Progress progress_entities = 2; + + // An estimate of the number of bytes processed. + Progress progress_bytes = 3; + + // Description of which entities are being exported. + EntityFilter entity_filter = 4; + + // Location for the export metadata and data files. This will be the same + // value as the + // [google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix] + // field. The final output location is provided in + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url]. + string output_url_prefix = 5; +} + +// Metadata for ImportEntities operations. +message ImportEntitiesMetadata { + // Metadata common to all Datastore Admin operations. + CommonMetadata common = 1; + + // An estimate of the number of entities processed. + Progress progress_entities = 2; + + // An estimate of the number of bytes processed. 
+ Progress progress_bytes = 3; + + // Description of which entities are being imported. + EntityFilter entity_filter = 4; + + // The location of the import metadata file. This will be the same value as + // the [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url] + // field. + string input_url = 5; +} + +// Identifies a subset of entities in a project. This is specified as +// combinations of kinds and namespaces (either or both of which may be all, as +// described in the following examples). +// Example usage: +// +// Entire project: +// kinds=[], namespace_ids=[] +// +// Kinds Foo and Bar in all namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=[] +// +// Kinds Foo and Bar only in the default namespace: +// kinds=['Foo', 'Bar'], namespace_ids=[''] +// +// Kinds Foo and Bar in both the default and Baz namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] +// +// The entire Baz namespace: +// kinds=[], namespace_ids=['Baz'] +message EntityFilter { + // If empty, then this represents all kinds. + repeated string kinds = 1; + + // An empty list represents all namespaces. This is the preferred + // usage for projects that don't use namespaces. + // + // An empty string element represents the default namespace. This should be + // used if the project has data in non-default namespaces, but doesn't want to + // include them. + // Each namespace in this list must be unique. + repeated string namespace_ids = 2; +} + +// Operation types. +enum OperationType { + // Unspecified. + OPERATION_TYPE_UNSPECIFIED = 0; + + // ExportEntities. + EXPORT_ENTITIES = 1; + + // ImportEntities. + IMPORT_ENTITIES = 2; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto new file mode 100644 index 000000000000..bab7953d62e2 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto @@ -0,0 +1,204 @@ +// Copyright 2018 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1; + +import "google/api/annotations.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/type/latlng.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/v1;datastore"; +option java_multiple_files = true; +option java_outer_classname = "EntityProto"; +option java_package = "com.google.datastore.v1"; +option php_namespace = "Google\\Cloud\\Datastore\\V1"; + + +// A partition ID identifies a grouping of entities. The grouping is always +// by project and namespace, however the namespace ID may be empty. +// +// A partition ID contains several dimensions: +// project ID and namespace ID. +// +// Partition dimensions: +// +// - May be `""`. +// - Must be valid UTF-8 bytes. 
+// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` +// If the value of any dimension matches regex `__.*__`, the partition is +// reserved/read-only. +// A reserved/read-only partition ID is forbidden in certain documented +// contexts. +// +// Foreign partition IDs (in which the project ID does +// not match the context project ID ) are discouraged. +// Reads and writes of foreign partition IDs may fail if the project is not in an active state. +message PartitionId { + // The ID of the project to which the entities belong. + string project_id = 2; + + // If not empty, the ID of the namespace to which the entities belong. + string namespace_id = 4; +} + +// A unique identifier for an entity. +// If a key's partition ID or any of its path kinds or names are +// reserved/read-only, the key is reserved/read-only. +// A reserved/read-only key is forbidden in certain documented contexts. +message Key { + // A (kind, ID/name) pair used to construct a key path. + // + // If either name or ID is set, the element is complete. + // If neither is set, the element is incomplete. + message PathElement { + // The kind of the entity. + // A kind matching regex `__.*__` is reserved/read-only. + // A kind must not contain more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + string kind = 1; + + // The type of ID. + oneof id_type { + // The auto-allocated ID of the entity. + // Never equal to zero. Values less than zero are discouraged and may not + // be supported in the future. + int64 id = 2; + + // The name of the entity. + // A name matching regex `__.*__` is reserved/read-only. + // A name must not be more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + string name = 3; + } + } + + // Entities are partitioned into subsets, currently identified by a project + // ID and namespace ID. + // Queries are scoped to a single partition. + PartitionId partition_id = 1; + + // The entity path. + // An entity path consists of one or more elements composed of a kind and a + // string or numerical identifier, which identify entities. The first + // element identifies a _root entity_, the second element identifies + // a _child_ of the root entity, the third element identifies a child of the + // second entity, and so forth. The entities identified by all prefixes of + // the path are called the element's _ancestors_. + // + // An entity path is always fully complete: *all* of the entity's ancestors + // are required to be in the path along with the entity identifier itself. + // The only exception is that in some documented cases, the identifier in the + // last path element (for the entity) itself may be omitted. For example, + // the last path element of the key of `Mutation.insert` may have no + // identifier. + // + // A path can never be empty, and a path can have at most 100 elements. + repeated PathElement path = 2; +} + +// An array value. +message ArrayValue { + // Values in the array. + // The order of this array may not be preserved if it contains a mix of + // indexed and unindexed values. + repeated Value values = 1; +} + +// A message that can hold any of the supported value types and associated +// metadata. +message Value { + // Must have a value set. + oneof value_type { + // A null value. + google.protobuf.NullValue null_value = 11; + + // A boolean value. + bool boolean_value = 1; + + // An integer value. + int64 integer_value = 2; + + // A double value. + double double_value = 3; + + // A timestamp value. 
+ // When stored in the Datastore, precise only to microseconds; + // any additional precision is rounded down. + google.protobuf.Timestamp timestamp_value = 10; + + // A key value. + Key key_value = 5; + + // A UTF-8 encoded string value. + // When `exclude_from_indexes` is false (it is indexed) , may have at most 1500 bytes. + // Otherwise, may be set to at least 1,000,000 bytes. + string string_value = 17; + + // A blob value. + // May have at most 1,000,000 bytes. + // When `exclude_from_indexes` is false, may have at most 1500 bytes. + // In JSON requests, must be base64-encoded. + bytes blob_value = 18; + + // A geo point value representing a point on the surface of Earth. + google.type.LatLng geo_point_value = 8; + + // An entity value. + // + // - May have no key. + // - May have a key with an incomplete key path. + // - May have a reserved/read-only key. + Entity entity_value = 6; + + // An array value. + // Cannot contain another array value. + // A `Value` instance that sets field `array_value` must not set fields + // `meaning` or `exclude_from_indexes`. + ArrayValue array_value = 9; + } + + // The `meaning` field should only be populated for backwards compatibility. + int32 meaning = 14; + + // If the value should be excluded from all indexes including those defined + // explicitly. + bool exclude_from_indexes = 19; +} + +// A Datastore data object. +// +// An entity is limited to 1 megabyte when stored. That _roughly_ +// corresponds to a limit of 1 megabyte for the serialized form of this +// message. +message Entity { + // The entity's key. + // + // An entity must have a key, unless otherwise documented (for example, + // an entity in `Value.entity_value` may have no key). + // An entity's kind is its key path's last element's kind, + // or null if it has no key. + Key key = 1; + + // The entity's properties. + // The map's keys are property names. + // A property name matching regex `__.*__` is reserved. + // A reserved property name is forbidden in certain documented contexts. + // The name must not contain more than 500 characters. + // The name cannot be `""`. + map properties = 3; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/index.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/index.proto new file mode 100644 index 000000000000..15013d557ea2 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/index.proto @@ -0,0 +1,122 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.admin.v1; + +import "google/api/annotations.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.Admin.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin"; +option java_multiple_files = true; +option java_outer_classname = "IndexProto"; +option java_package = "com.google.datastore.admin.v1"; + +// A minimal index definition. 
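
Client-side, these Value variants map onto ordinary Python types, and exclude_from_indexes is how a string escapes the 1,500-byte indexed limit described above. A sketch, with placeholder names (GeoPoint lives in google.cloud.datastore.helpers):

    import datetime

    from google.cloud import datastore
    from google.cloud.datastore.helpers import GeoPoint

    client = datastore.Client(project="my-project")
    task = datastore.Entity(
        client.key("Task", 1234),
        exclude_from_indexes=("description",),  # allow a long string_value
    )
    task["done"] = False                                   # boolean_value
    task["priority"] = 4                                   # integer_value
    task["created"] = datetime.datetime.now(datetime.timezone.utc)  # timestamp_value
    task["tags"] = ["fun", "programming"]                  # array_value
    task["where"] = GeoPoint(37.422, -122.084)             # geo_point_value
    task["description"] = "x" * 10_000                     # unindexed, so > 1500 bytes is fine
    client.put(task)
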
+// Next tag: 8 +message Index { + // Next tag: 3 + message IndexedProperty { + // The property name to index. + // Required. + string name = 1; + + // The indexed property's direction. Must not be DIRECTION_UNSPECIFIED. + // Required. + Direction direction = 2; + } + + // For an ordered index, specifies whether each of the entity's ancestors + // will be included. + enum AncestorMode { + // The ancestor mode is unspecified. + ANCESTOR_MODE_UNSPECIFIED = 0; + + // Do not include the entity's ancestors in the index. + NONE = 1; + + // Include all the entity's ancestors in the index. + ALL_ANCESTORS = 2; + } + + // The direction determines how a property is indexed. + enum Direction { + // The direction is unspecified. + DIRECTION_UNSPECIFIED = 0; + + // The property's values are indexed so as to support sequencing in + // ascending order and also query by <, >, <=, >=, and =. + ASCENDING = 1; + + // The property's values are indexed so as to support sequencing in + // descending order and also query by <, >, <=, >=, and =. + DESCENDING = 2; + } + + // The possible set of states of an index. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The index is being created, and cannot be used by queries. + // There is an active long-running operation for the index. + // The index is updated when writing an entity. + // Some index data may exist. + CREATING = 1; + + // The index is ready to be used. + // The index is updated when writing an entity. + // The index is fully populated from all stored entities it applies to. + READY = 2; + + // The index is being deleted, and cannot be used by queries. + // There is an active long-running operation for the index. + // The index is not updated when writing an entity. + // Some index data may exist. + DELETING = 3; + + // The index was being created or deleted, but something went wrong. + // The index cannot be used by queries. + // There is no active long-running operation for the index, + // and the most recently finished long-running operation failed. + // The index is not updated when writing an entity. + // Some index data may exist. + ERROR = 4; + } + + // Project ID. + // Output only. + string project_id = 1; + + // The resource ID of the index. + // Output only. + string index_id = 3; + + // The entity kind to which this index applies. + // Required. + string kind = 4; + + // The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. + // Required. + AncestorMode ancestor = 5; + + // An ordered sequence of property names and their index attributes. + // Required. + repeated IndexedProperty properties = 6; + + // The state of the index. + // Output only. + State state = 7; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto new file mode 100644 index 000000000000..2efed915a74e --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto @@ -0,0 +1,310 @@ +// Copyright 2018 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1; + +import "google/api/annotations.proto"; +import "google/datastore/v1/entity.proto"; +import "google/protobuf/wrappers.proto"; +import "google/type/latlng.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/v1;datastore"; +option java_multiple_files = true; +option java_outer_classname = "QueryProto"; +option java_package = "com.google.datastore.v1"; +option php_namespace = "Google\\Cloud\\Datastore\\V1"; + + +// The result of fetching an entity from Datastore. +message EntityResult { + // Specifies what data the 'entity' field contains. + // A `ResultType` is either implied (for example, in `LookupResponse.missing` + // from `datastore.proto`, it is always `KEY_ONLY`) or specified by context + // (for example, in message `QueryResultBatch`, field `entity_result_type` + // specifies a `ResultType` for all the values in field `entity_results`). + enum ResultType { + // Unspecified. This value is never used. + RESULT_TYPE_UNSPECIFIED = 0; + + // The key and properties. + FULL = 1; + + // A projected subset of properties. The entity may have no key. + PROJECTION = 2; + + // Only the key. + KEY_ONLY = 3; + } + + // The resulting entity. + Entity entity = 1; + + // The version of the entity, a strictly positive number that monotonically + // increases with changes to the entity. + // + // This field is set for [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity + // results. + // + // For [missing][google.datastore.v1.LookupResponse.missing] entities in `LookupResponse`, this + // is the version of the snapshot that was used to look up the entity, and it + // is always set except for eventually consistent reads. + int64 version = 4; + + // A cursor that points to the position after the result entity. + // Set only when the `EntityResult` is part of a `QueryResultBatch` message. + bytes cursor = 3; +} + +// A query for entities. +message Query { + // The projection to return. Defaults to returning all properties. + repeated Projection projection = 2; + + // The kinds to query (if empty, returns entities of all kinds). + // Currently at most 1 kind may be specified. + repeated KindExpression kind = 3; + + // The filter to apply. + Filter filter = 4; + + // The order to apply to the query results (if empty, order is unspecified). + repeated PropertyOrder order = 5; + + // The properties to make distinct. The query results will contain the first + // result for each distinct combination of values for the given properties + // (if empty, all results are returned). + repeated PropertyReference distinct_on = 6; + + // A starting point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to continue the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + bytes start_cursor = 7; + + // An ending point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to limit the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + bytes end_cursor = 8; + + // The number of results to skip. Applies before limit, but after all other + // constraints. Optional. Must be >= 0 if specified. + int32 offset = 10; + + // The maximum number of results to return. 
Applies after all other + // constraints. Optional. + // Unspecified is interpreted as no limit. + // Must be >= 0 if specified. + google.protobuf.Int32Value limit = 12; +} + +// A representation of a kind. +message KindExpression { + // The name of the kind. + string name = 1; +} + +// A reference to a property relative to the kind expressions. +message PropertyReference { + // The name of the property. + // If name includes "."s, it may be interpreted as a property name path. + string name = 2; +} + +// A representation of a property in a projection. +message Projection { + // The property to project. + PropertyReference property = 1; +} + +// The desired order for a specific property. +message PropertyOrder { + // The sort direction. + enum Direction { + // Unspecified. This value must not be used. + DIRECTION_UNSPECIFIED = 0; + + // Ascending. + ASCENDING = 1; + + // Descending. + DESCENDING = 2; + } + + // The property to order by. + PropertyReference property = 1; + + // The direction to order by. Defaults to `ASCENDING`. + Direction direction = 2; +} + +// A holder for any type of filter. +message Filter { + // The type of filter. + oneof filter_type { + // A composite filter. + CompositeFilter composite_filter = 1; + + // A filter on a property. + PropertyFilter property_filter = 2; + } +} + +// A filter that merges multiple other filters using the given operator. +message CompositeFilter { + // A composite filter operator. + enum Operator { + // Unspecified. This value must not be used. + OPERATOR_UNSPECIFIED = 0; + + // The results are required to satisfy each of the combined filters. + AND = 1; + } + + // The operator for combining multiple filters. + Operator op = 1; + + // The list of filters to combine. + // Must contain at least one filter. + repeated Filter filters = 2; +} + +// A filter on a specific property. +message PropertyFilter { + // A property filter operator. + enum Operator { + // Unspecified. This value must not be used. + OPERATOR_UNSPECIFIED = 0; + + // Less than. + LESS_THAN = 1; + + // Less than or equal. + LESS_THAN_OR_EQUAL = 2; + + // Greater than. + GREATER_THAN = 3; + + // Greater than or equal. + GREATER_THAN_OR_EQUAL = 4; + + // Equal. + EQUAL = 5; + + // Has ancestor. + HAS_ANCESTOR = 11; + } + + // The property to filter by. + PropertyReference property = 1; + + // The operator to filter by. + Operator op = 2; + + // The value to compare the property to. + Value value = 3; +} + +// A [GQL query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). +message GqlQuery { + // A string of the format described + // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). + string query_string = 1; + + // When false, the query string must not contain any literals and instead must + // bind all values. For example, + // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while + // `SELECT * FROM Kind WHERE a = @value` is. + bool allow_literals = 2; + + // For each non-reserved named binding site in the query string, there must be + // a named parameter with that name, but not necessarily the inverse. + // + // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex + // `__.*__`, and must not be `""`. + map named_bindings = 5; + + // Numbered binding site @1 references the first numbered parameter, + // effectively using 1-based indexing, rather than the usual 0. + // + // For each binding site numbered i in `query_string`, there must be an i-th + // numbered parameter. The inverse must also be true. 
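
Most of the Query fields above — kind, filter, order, limit — are settable through the client's Query helper (the client builds Query messages; GqlQuery has no public wrapper here). A minimal sketch using the positional add_filter signature of this era of the library (newer releases prefer a filter= keyword); the server requires the inequality property to sort first, and a composite index may be needed:

    from google.cloud import datastore

    client = datastore.Client(project="my-project")

    query = client.query(kind="Task")        # KindExpression
    query.add_filter("priority", ">=", 4)    # PropertyFilter GREATER_THAN_OR_EQUAL
    query.add_filter("done", "=", False)     # combined via CompositeFilter AND
    query.order = ["priority", "-created"]   # inequality property must sort first

    for task in query.fetch(limit=10):       # limit -> google.protobuf.Int32Value
        print(task["priority"], task.key)
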
+ repeated GqlQueryParameter positional_bindings = 4; +} + +// A binding parameter for a GQL query. +message GqlQueryParameter { + // The type of parameter. + oneof parameter_type { + // A value parameter. + Value value = 2; + + // A query cursor. Query cursors are returned in query + // result batches. + bytes cursor = 3; + } +} + +// A batch of results produced by a query. +message QueryResultBatch { + // The possible values for the `more_results` field. + enum MoreResultsType { + // Unspecified. This value is never used. + MORE_RESULTS_TYPE_UNSPECIFIED = 0; + + // There may be additional batches to fetch from this query. + NOT_FINISHED = 1; + + // The query is finished, but there may be more results after the limit. + MORE_RESULTS_AFTER_LIMIT = 2; + + // The query is finished, but there may be more results after the end + // cursor. + MORE_RESULTS_AFTER_CURSOR = 4; + + // The query is finished, and there are no more results. + NO_MORE_RESULTS = 3; + } + + // The number of results skipped, typically because of an offset. + int32 skipped_results = 6; + + // A cursor that points to the position after the last skipped result. + // Will be set when `skipped_results` != 0. + bytes skipped_cursor = 3; + + // The result type for every entity in `entity_results`. + EntityResult.ResultType entity_result_type = 1; + + // The results for this batch. + repeated EntityResult entity_results = 2; + + // A cursor that points to the position after the last result in the batch. + bytes end_cursor = 4; + + // The state of the query after the current batch. + MoreResultsType more_results = 5; + + // The version number of the snapshot this batch was returned from. + // This applies to the range of results from the query's `start_cursor` (or + // the beginning of the query if no cursor was given) to this batch's + // `end_cursor` (not the query's `end_cursor`). + // + // In a single transaction, subsequent query result batches for the same query + // can have a greater snapshot version number. Each batch's snapshot version + // is valid for all preceding batches. + // The value will be zero for eventually consistent queries. 
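
Batches and cursors surface in the Python client through the fetch iterator: each page corresponds to one QueryResultBatch, and next_page_token carries that batch's end_cursor. A sketch of cursor-based paging:

    from google.cloud import datastore

    client = datastore.Client(project="my-project")
    query = client.query(kind="Task")

    iterator = query.fetch(limit=100)
    page = next(iterator.pages)          # results from one QueryResultBatch
    tasks = list(page)

    cursor = iterator.next_page_token    # the batch's end_cursor, if any
    if cursor:                           # server signalled more results
        more = list(query.fetch(start_cursor=cursor, limit=100))
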
+ int64 snapshot_version = 7; +} diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index c48ec8257292..4501f8f0fdb6 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-17T13:16:51.374459Z", + "updateTime": "2019-01-23T22:54:56.744429Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.6", - "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e" + "version": "0.16.7", + "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05", - "internalRef": "229626798" + "sha": "9aac88a22468b1e291937f55fa1ef237adfdc63e", + "internalRef": "230568136" } }, { diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index cd9c11e2bc3d..0d477f5f179d 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -27,6 +27,7 @@ "v1", config_path="/google/datastore/artman_datastore.yaml", artman_output_name="datastore-v1", + include_protos=True, ) s.move(library / "google/cloud/datastore_v1/proto") From 64c53d50c8d43efdb904ec694c3b50db93ae85f8 Mon Sep 17 00:00:00 2001 From: Pravin Dahal Date: Mon, 11 Feb 2019 19:12:32 +0100 Subject: [PATCH 213/611] Updated client library documentation URLs. (#7307) Previously, the URLs would redirect using JavaScript, which would either be slow or not work at all (in case JavaScript is disabled on the browser) --- packages/google-cloud-datastore/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index dbf58b858160..d5db80341043 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -20,7 +20,7 @@ all other queries. :target: https://pypi.org/project/google-cloud-datastore/ .. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs .. _Product Documentation: https://cloud.google.com/datastore/docs -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/datastore/index.html +.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/datastore/index.html Quick Start ----------- @@ -35,7 +35,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore -.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html +.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html Installation ~~~~~~~~~~~~ From 119c2bd68033afed822c19e4e0a602cbb20215b3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 20 Feb 2019 10:51:36 -0800 Subject: [PATCH 214/611] Add clarifying comment to blacken nox target. 
(#7389) --- packages/google-cloud-datastore/noxfile.py | 4 ++++ packages/google-cloud-datastore/synth.metadata | 10 +++++----- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index bfac9f4c2bce..d692cf37f39c 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -45,6 +45,10 @@ def blacken(session): """Run black. Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install("black") session.run( diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 4501f8f0fdb6..637b8a545866 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-23T22:54:56.744429Z", + "updateTime": "2019-02-20T18:02:37.048112Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.7", - "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80" + "version": "0.16.13", + "dockerImage": "googleapis/artman@sha256:5fd9aee1d82a00cebf425c8fa431f5457539562f5867ad9c54370f0ec9a7ccaa" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9aac88a22468b1e291937f55fa1ef237adfdc63e", - "internalRef": "230568136" + "sha": "18ab81eec27942a942622d5a8d9c9e7a202e8c16", + "internalRef": "234814197" } }, { From 297e812c653e7b6ed15a403f1a4da5881925b929 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Tue, 26 Feb 2019 09:04:37 -0800 Subject: [PATCH 215/611] Copy lintified proto files (via synth). (#7446) --- .../cloud/datastore_v1/proto/datastore.proto | 51 ++++++++++--------- .../cloud/datastore_v1/proto/entity.proto | 8 +-- .../cloud/datastore_v1/proto/query.proto | 20 ++++---- .../google-cloud-datastore/synth.metadata | 10 ++-- 4 files changed, 48 insertions(+), 41 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto index 2a29a58cec72..51d69acb9db8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto @@ -27,7 +27,6 @@ option java_outer_classname = "DatastoreProto"; option java_package = "com.google.datastore.v1"; option php_namespace = "Google\\Cloud\\Datastore\\V1"; - // Each RPC normalizes the partition IDs of the keys in its input entities, // and always returns entities with keys with normalized partition IDs. // This applies to all keys and entities, including those in values, except keys @@ -53,7 +52,8 @@ service Datastore { } // Begins a new transaction. - rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { + rpc BeginTransaction(BeginTransactionRequest) + returns (BeginTransactionResponse) { option (google.api.http) = { post: "/v1/projects/{project_id}:beginTransaction" body: "*" @@ -150,7 +150,8 @@ message RunQueryRequest { } } -// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. 
+// The response for +// [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. message RunQueryResponse { // A batch of query results (always present). QueryResultBatch batch = 1; @@ -159,7 +160,8 @@ message RunQueryResponse { Query query = 2; } -// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +// The request for +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. message BeginTransactionRequest { // The ID of the project against which to make the request. string project_id = 8; @@ -168,7 +170,8 @@ message BeginTransactionRequest { TransactionOptions transaction_options = 10; } -// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +// The response for +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. message BeginTransactionResponse { // The transaction identifier (always present). bytes transaction = 1; @@ -184,11 +187,10 @@ message RollbackRequest { bytes transaction = 1; } -// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -// (an empty message). -message RollbackResponse { - -} +// The response for +// [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an empty +// message). +message RollbackResponse {} // The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. message CommitRequest { @@ -198,7 +200,8 @@ message CommitRequest { MODE_UNSPECIFIED = 0; // Transactional: The mutations are either all applied, or none are applied. - // Learn about transactions [here](https://cloud.google.com/datastore/docs/concepts/transactions). + // Learn about transactions + // [here](https://cloud.google.com/datastore/docs/concepts/transactions). TRANSACTIONAL = 1; // Non-transactional: The mutations may not apply as all or none. @@ -246,7 +249,8 @@ message CommitResponse { int32 index_updates = 4; } -// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +// The request for +// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. message AllocateIdsRequest { // The ID of the project against which to make the request. string project_id = 8; @@ -256,14 +260,16 @@ message AllocateIdsRequest { repeated Key keys = 1; } -// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +// The response for +// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. message AllocateIdsResponse { // The keys specified in the request (in the same order), each with // its key path completed with a newly allocated ID. repeated Key keys = 1; } -// The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +// The request for +// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. message ReserveIdsRequest { // The ID of the project against which to make the request. string project_id = 8; @@ -276,10 +282,9 @@ message ReserveIdsRequest { repeated Key keys = 1; } -// The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. -message ReserveIdsResponse { - -} +// The response for +// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +message ReserveIdsResponse {} // A mutation to apply to an entity. message Mutation { @@ -369,8 +374,10 @@ message ReadOptions { // Options for beginning a new transaction. 
// // Transactions can be created explicitly with calls to -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] or implicitly by setting -// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] in read requests. +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] +// or implicitly by setting +// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] +// in read requests. message TransactionOptions { // Options specific to read / write transactions. message ReadWrite { @@ -379,9 +386,7 @@ message TransactionOptions { } // Options specific to read-only transactions. - message ReadOnly { - - } + message ReadOnly {} // The `mode` of the transaction, indicating whether write operations are // supported. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto index bab7953d62e2..9decd2b53802 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto @@ -28,7 +28,6 @@ option java_outer_classname = "EntityProto"; option java_package = "com.google.datastore.v1"; option php_namespace = "Google\\Cloud\\Datastore\\V1"; - // A partition ID identifies a grouping of entities. The grouping is always // by project and namespace, however the namespace ID may be empty. // @@ -47,7 +46,8 @@ option php_namespace = "Google\\Cloud\\Datastore\\V1"; // // Foreign partition IDs (in which the project ID does // not match the context project ID ) are discouraged. -// Reads and writes of foreign partition IDs may fail if the project is not in an active state. +// Reads and writes of foreign partition IDs may fail if the project is not in +// an active state. message PartitionId { // The ID of the project to which the entities belong. string project_id = 2; @@ -145,8 +145,8 @@ message Value { Key key_value = 5; // A UTF-8 encoded string value. - // When `exclude_from_indexes` is false (it is indexed) , may have at most 1500 bytes. - // Otherwise, may be set to at least 1,000,000 bytes. + // When `exclude_from_indexes` is false (it is indexed) , may have at most + // 1500 bytes. Otherwise, may be set to at least 1,000,000 bytes. string string_value = 17; // A blob value. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto index 2efed915a74e..872612c5dd24 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto @@ -28,7 +28,6 @@ option java_outer_classname = "QueryProto"; option java_package = "com.google.datastore.v1"; option php_namespace = "Google\\Cloud\\Datastore\\V1"; - // The result of fetching an entity from Datastore. message EntityResult { // Specifies what data the 'entity' field contains. @@ -56,12 +55,12 @@ message EntityResult { // The version of the entity, a strictly positive number that monotonically // increases with changes to the entity. // - // This field is set for [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity - // results. + // This field is set for + // [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity results. 
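The `string_value` note in `entity.proto` above (at most 1500 bytes when indexed) is the reason the Python client exposes `exclude_from_indexes`. A minimal sketch; the kind `"Note"` and property `"body"` are invented for illustration:

```python
# Storing a string past the 1500-byte indexed limit requires excluding the
# property from indexes, per the entity.proto comment above.
from google.cloud import datastore

client = datastore.Client()
entity = datastore.Entity(client.key("Note"), exclude_from_indexes=("body",))
entity["body"] = "x" * 10000  # too large to index; fine when unindexed
client.put(entity)
```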
// - // For [missing][google.datastore.v1.LookupResponse.missing] entities in `LookupResponse`, this - // is the version of the snapshot that was used to look up the entity, and it - // is always set except for eventually consistent reads. + // For [missing][google.datastore.v1.LookupResponse.missing] entities in + // `LookupResponse`, this is the version of the snapshot that was used to look + // up the entity, and it is always set except for eventually consistent reads. int64 version = 4; // A cursor that points to the position after the result entity. @@ -91,12 +90,14 @@ message Query { // A starting point for the query results. Query cursors are // returned in query result batches and - // [can only be used to continue the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + // [can only be used to continue the same + // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). bytes start_cursor = 7; // An ending point for the query results. Query cursors are // returned in query result batches and - // [can only be used to limit the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + // [can only be used to limit the same + // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). bytes end_cursor = 8; // The number of results to skip. Applies before limit, but after all other @@ -217,7 +218,8 @@ message PropertyFilter { Value value = 3; } -// A [GQL query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). +// A [GQL +// query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). message GqlQuery { // A string of the format described // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 637b8a545866..d3501d1371c2 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-02-20T18:02:37.048112Z", + "updateTime": "2019-02-26T13:14:40.008317Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.13", - "dockerImage": "googleapis/artman@sha256:5fd9aee1d82a00cebf425c8fa431f5457539562f5867ad9c54370f0ec9a7ccaa" + "version": "0.16.14", + "dockerImage": "googleapis/artman@sha256:f3d61ae45abaeefb6be5f228cda22732c2f1b00fb687c79c4bd4f2c42bb1e1a7" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "18ab81eec27942a942622d5a8d9c9e7a202e8c16", - "internalRef": "234814197" + "sha": "29f098cb03a9983cc9cb15993de5da64419046f2", + "internalRef": "235621085" } }, { From 57aa4fe648e7c6201cfd2e980f031b1feeb2e3c2 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 20 Mar 2019 13:41:12 -0700 Subject: [PATCH 216/611] Remove classifier for Python 3.4 for end-of-life. (#7535) * Remove classifier for Python 3.4 for end-of-life. * Update supported versions in Client README, Contributing Guide --- packages/google-cloud-datastore/README.rst | 2 +- packages/google-cloud-datastore/setup.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index d5db80341043..ed998982d3a6 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -53,7 +53,7 @@ dependencies. 
Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.4 +Python >= 3.5 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index eb51517ad5bc..c618390b506c 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -73,7 +73,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', From 8f0e047ea6952600a1f86d1f3479c7a78f43f6bb Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 1 Apr 2019 09:17:07 -0700 Subject: [PATCH 217/611] Add routing header to method metadata (via synth). (#7593) --- .../datastore_v1/gapic/datastore_client.py | 92 +++++++++++++++++++ .../google-cloud-datastore/synth.metadata | 12 +-- 2 files changed, 98 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index c3d27b48777c..b122b8631125 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -22,6 +22,7 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.protobuf_helpers import grpc @@ -243,6 +244,19 @@ def lookup( request = datastore_pb2.LookupRequest( project_id=project_id, keys=keys, read_options=read_options ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["lookup"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -336,6 +350,19 @@ def run_query( query=query, gql_query=gql_query, ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["run_query"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -400,6 +427,19 @@ def begin_transaction( request = datastore_pb2.BeginTransactionRequest( project_id=project_id, transaction_options=transaction_options ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["begin_transaction"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -497,6 +537,19 @@ def commit( mutations=mutations, transaction=transaction, ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = 
[("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["commit"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -562,6 +615,19 @@ def rollback( request = datastore_pb2.RollbackRequest( project_id=project_id, transaction=transaction ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["rollback"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -629,6 +695,19 @@ def allocate_ids( ) request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["allocate_ids"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -700,6 +779,19 @@ def reserve_ids( request = datastore_pb2.ReserveIdsRequest( project_id=project_id, keys=keys, database_id=database_id ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["reserve_ids"]( request, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index d3501d1371c2..b95127648f43 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-02-26T13:14:40.008317Z", + "updateTime": "2019-03-28T12:13:09.147576Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.14", - "dockerImage": "googleapis/artman@sha256:f3d61ae45abaeefb6be5f228cda22732c2f1b00fb687c79c4bd4f2c42bb1e1a7" + "version": "0.16.20", + "dockerImage": "googleapis/artman@sha256:e3c054a2fb85a12481c722af616c7fb6f1d02d862248385eecbec3e4240ebd1e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "29f098cb03a9983cc9cb15993de5da64419046f2", - "internalRef": "235621085" + "sha": "6a84b3267b0a95e922608b9891219075047eee29", + "internalRef": "240640999" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.1.16" + "version": "2019.2.26" } } ], From 032c8785d6e44c793141e4ca23177c6f7a38f5d5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 6 May 2019 16:22:38 -0700 Subject: [PATCH 218/611] Add nox session `docs` (via synth). 
(#7768) --- .../google-cloud-datastore/docs/README.rst | 1 + packages/google-cloud-datastore/docs/conf.py | 38 +++++++++++++++++-- .../google-cloud-datastore/docs/index.rst | 2 +- packages/google-cloud-datastore/noxfile.py | 20 ++++++++++ .../google-cloud-datastore/synth.metadata | 12 +++--- 5 files changed, 62 insertions(+), 11 deletions(-) create mode 120000 packages/google-cloud-datastore/docs/README.rst diff --git a/packages/google-cloud-datastore/docs/README.rst b/packages/google-cloud-datastore/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-datastore/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index a24a380e1afd..7cad41e2e9e5 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -20,12 +20,12 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.90.4" +__version__ = "0.1.0" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' +needs_sphinx = "1.6.3" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -37,6 +37,7 @@ "sphinx.ext.intersphinx", "sphinx.ext.coverage", "sphinx.ext.napoleon", + "sphinx.ext.todo", "sphinx.ext.viewcode", ] @@ -55,7 +56,7 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = ".rst" +source_suffix = [".rst", ".md"] # The encoding of source files. # source_encoding = 'utf-8-sig' @@ -130,7 +131,15 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -# html_theme_options = {} +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -219,6 +228,17 @@ # Output file base name for HTML help builder. htmlhelp_basename = "google-cloud-datastore-doc" +# -- Options for warnings ------------------------------------------------------ + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + # -- Options for LaTeX output --------------------------------------------- latex_elements = { @@ -315,6 +335,16 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ( + "https://googleapis.github.io/google-cloud-python/latest", + None, + ), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://docs.python-requests.org/en/master/", None), + "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } # Napoleon settings diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst index c90607799913..30d0575a2294 100644 --- a/packages/google-cloud-datastore/docs/index.rst +++ b/packages/google-cloud-datastore/docs/index.rst @@ -1,4 +1,4 @@ -.. include:: /../datastore/README.rst +.. include:: README.rst API Reference diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index d692cf37f39c..0f528b7f3902 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -16,6 +16,7 @@ from __future__ import absolute_import import os +import shutil import nox @@ -138,3 +139,22 @@ def cover(session): session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install('-e', '.') + session.install('sphinx', 'alabaster', 'recommonmark') + + shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) + session.run( + 'sphinx-build', + '-W', # warnings as errors + '-T', # show full traceback on exception + '-N', # no colors + '-b', 'html', + '-d', os.path.join('docs', '_build', 'doctrees', ''), + os.path.join('docs', ''), + os.path.join('docs', '_build', 'html', ''), + ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index b95127648f43..2306094f29ab 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-03-28T12:13:09.147576Z", + "updateTime": "2019-04-23T12:14:14.125430Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.20", - "dockerImage": "googleapis/artman@sha256:e3c054a2fb85a12481c722af616c7fb6f1d02d862248385eecbec3e4240ebd1e" + "version": "0.17.0", + "dockerImage": "googleapis/artman@sha256:c58f4ec3838eb4e0718eb1bccc6512bd6850feaa85a360a9e38f6f848ec73bc2" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6a84b3267b0a95e922608b9891219075047eee29", - "internalRef": "240640999" + "sha": "547e19e7df398e9290e8e3674d7351efc500f9b0", + "internalRef": "244712781" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.2.26" + "version": "2019.4.10" } } ], From ed06b6e1278d9d7f28ba6636823687fec3d9da8a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 8 May 2019 15:09:55 -0700 Subject: 
[PATCH 219/611] [CHANGE ME] Re-generated datastore to pick up changes in the API or client library generator. (#7881) --- .../datastore_v1/gapic/datastore_client_config.py | 5 +---- packages/google-cloud-datastore/synth.metadata | 12 ++++++------ 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index 95822b8babfc..c5ff7ed8dcad 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -1,10 +1,7 @@ config = { "interfaces": { "google.datastore.v1.Datastore": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, + "retry_codes": {"idempotent": ["UNAVAILABLE"], "non_idempotent": []}, "retry_params": { "default": { "initial_retry_delay_millis": 100, diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 2306094f29ab..86fa88fd950e 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-04-23T12:14:14.125430Z", + "updateTime": "2019-05-08T12:16:44.389013Z", "sources": [ { "generator": { "name": "artman", - "version": "0.17.0", - "dockerImage": "googleapis/artman@sha256:c58f4ec3838eb4e0718eb1bccc6512bd6850feaa85a360a9e38f6f848ec73bc2" + "version": "0.19.0", + "dockerImage": "googleapis/artman@sha256:d3df563538225ac6caac45d8ad86499500211d1bcb2536955a6dbda15e1b368e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "547e19e7df398e9290e8e3674d7351efc500f9b0", - "internalRef": "244712781" + "sha": "51145ff7812d2bb44c1219d0b76dac92a8bd94b2", + "internalRef": "247143125" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.4.10" + "version": "2019.5.2" } } ], From c4396008d8aa0e5a02c781614f691af4fb8dacc8 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 10 May 2019 10:17:36 -0700 Subject: [PATCH 220/611] Datastore: Retry DEADLINE_EXCEEDED (via synth). 
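This regeneration (#7881) drops `DEADLINE_EXCEEDED` from the `idempotent` retry codes; the patch directly below restores it. For orientation, a hedged sketch of what `google.api_core` builds from such a list; this mirrors, rather than reproduces, the generated wiring:

```python
# Roughly how the restored "idempotent" entry behaves at runtime: a Retry
# whose predicate matches those two gRPC status codes.
from google.api_core import exceptions, retry

idempotent_retry = retry.Retry(
    predicate=retry.if_exception_type(
        exceptions.DeadlineExceeded,    # DEADLINE_EXCEEDED
        exceptions.ServiceUnavailable,  # UNAVAILABLE
    )
)
```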
(#7925) --- .../cloud/datastore_v1/gapic/datastore_client_config.py | 5 ++++- packages/google-cloud-datastore/synth.metadata | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index c5ff7ed8dcad..95822b8babfc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -1,7 +1,10 @@ config = { "interfaces": { "google.datastore.v1.Datastore": { - "retry_codes": {"idempotent": ["UNAVAILABLE"], "non_idempotent": []}, + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, "retry_params": { "default": { "initial_retry_delay_millis": 100, diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 86fa88fd950e..bb81d3a995f1 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-05-08T12:16:44.389013Z", + "updateTime": "2019-05-10T12:20:07.392263Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "51145ff7812d2bb44c1219d0b76dac92a8bd94b2", - "internalRef": "247143125" + "sha": "07883be5bf3c3233095e99d8e92b8094f5d7084a", + "internalRef": "247530843" } }, { From 23f2853263f9fef3cf5c6fcba7147d9161a2b8a1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 16 May 2019 13:22:32 -0400 Subject: [PATCH 221/611] Pin 'google-cloud-core >= 1.0.0, < 2.0dev'. (#7993) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index c618390b506c..b4ea2447bde7 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', - 'google-cloud-core >=0.29.0, <0.30dev', + "google-cloud-core >= 1.0.0, < 2.0dev", ] extras = { } From 24789434253ae4be3928a3ee7da77fe06bef8a53 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 17 May 2019 11:27:38 -0400 Subject: [PATCH 222/611] Add `client_info` support to client. 
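The diff below threads a `client_info` through both the gRPC and HTTP transports. A hedged sketch of the new constructor argument once the patch is applied; the user-agent string and version are invented for illustration:

```python
# Supplying a custom ClientInfo so API requests carry an
# application-specific agent string ("my-app/1.0" is illustrative only).
from google.api_core.client_info import ClientInfo
from google.cloud import datastore

info = ClientInfo(user_agent="my-app/1.0", client_library_version="1.8.0")
client = datastore.Client(client_info=info)
```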
(#8013) --- .../google/cloud/datastore/_gapic.py | 10 +- .../google/cloud/datastore/_http.py | 27 ++-- .../google/cloud/datastore/client.py | 30 +++- .../tests/unit/test__gapic.py | 10 +- .../tests/unit/test__http.py | 140 ++++++++++++++---- .../tests/unit/test_client.py | 70 +++++---- 6 files changed, 213 insertions(+), 74 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py index 1210f2821802..18c2ce917bc2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py @@ -14,15 +14,12 @@ """Helpers for making API requests via gapic / gRPC.""" -from google.api_core.gapic_v1 import client_info -from google.cloud.datastore_v1.gapic import datastore_client from grpc import insecure_channel import six from google.cloud._helpers import make_secure_channel from google.cloud._http import DEFAULT_USER_AGENT - -from google.cloud.datastore import __version__ +from google.cloud.datastore_v1.gapic import datastore_client def make_datastore_api(client): @@ -42,8 +39,5 @@ def make_datastore_api(client): channel = insecure_channel(host) return datastore_client.DatastoreClient( - channel=channel, - client_info=client_info.ClientInfo( - client_library_version=__version__, gapic_version=__version__ - ), + channel=channel, client_info=client._client_info ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 03a551cec64e..0d10035340c6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -20,8 +20,6 @@ from google.cloud import exceptions from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 -from google.cloud.datastore import __version__ - DATASTORE_API_HOST = "datastore.googleapis.com" """Datastore API request host.""" @@ -32,10 +30,8 @@ API_URL_TEMPLATE = "{api_base}/{api_version}/projects" "/{project}:{method}" """A template for the URL of a particular API call.""" -_CLIENT_INFO = connection_module.CLIENT_INFO_TEMPLATE.format(__version__) - -def _request(http, project, method, data, base_url): +def _request(http, project, method, data, base_url, client_info): """Make a request over the Http transport to the Cloud Datastore API. :type http: :class:`requests.Session` @@ -55,15 +51,19 @@ def _request(http, project, method, data, base_url): :type base_url: str :param base_url: The base URL where the API lives. + :type client_info: :class:`google.api_core.client_info.ClientInfo` + :param client_info: used to generate user agent. + :rtype: str :returns: The string response content from the API call. :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the response code is not 200 OK. """ + user_agent = client_info.to_user_agent() headers = { "Content-Type": "application/x-protobuf", - "User-Agent": connection_module.DEFAULT_USER_AGENT, - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + "User-Agent": user_agent, + connection_module.CLIENT_INFO_HEADER: user_agent, } api_url = build_api_url(project, method, base_url) @@ -78,7 +78,7 @@ def _request(http, project, method, data, base_url): return response.content -def _rpc(http, project, method, base_url, request_pb, response_pb_cls): +def _rpc(http, project, method, base_url, client_info, request_pb, response_pb_cls): """Make a protobuf RPC request. 
:type http: :class:`requests.Session` @@ -94,6 +94,9 @@ def _rpc(http, project, method, base_url, request_pb, response_pb_cls): :type base_url: str :param base_url: The base URL where the API lives. + :type client_info: :class:`google.api_core.client_info.ClientInfo` + :param client_info: used to generate user agent. + :type request_pb: :class:`google.protobuf.message.Message` instance :param request_pb: the protobuf instance representing the request. @@ -106,7 +109,7 @@ def _rpc(http, project, method, base_url, request_pb, response_pb_cls): :returns: The RPC message parsed from the response. """ req_data = request_pb.SerializeToString() - response = _request(http, project, method, req_data, base_url) + response = _request(http, project, method, req_data, base_url, client_info) return response_pb_cls.FromString(response) @@ -172,6 +175,7 @@ def lookup(self, project_id, keys, read_options=None): project_id, "lookup", self.client._base_url, + self.client._client_info, request_pb, _datastore_pb2.LookupResponse, ) @@ -217,6 +221,7 @@ def run_query( project_id, "runQuery", self.client._base_url, + self.client._client_info, request_pb, _datastore_pb2.RunQueryResponse, ) @@ -240,6 +245,7 @@ def begin_transaction(self, project_id, transaction_options=None): project_id, "beginTransaction", self.client._base_url, + self.client._client_info, request_pb, _datastore_pb2.BeginTransactionResponse, ) @@ -278,6 +284,7 @@ def commit(self, project_id, mode, mutations, transaction=None): project_id, "commit", self.client._base_url, + self.client._client_info, request_pb, _datastore_pb2.CommitResponse, ) @@ -304,6 +311,7 @@ def rollback(self, project_id, transaction): project_id, "rollback", self.client._base_url, + self.client._client_info, request_pb, _datastore_pb2.RollbackResponse, ) @@ -327,6 +335,7 @@ def allocate_ids(self, project_id, keys): project_id, "allocateIds", self.client._base_url, + self.client._client_info, request_pb, _datastore_pb2.AllocateIdsResponse, ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 6f4d82eae672..9cf892aab54d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -18,6 +18,7 @@ from google.cloud._helpers import _LocalStack from google.cloud._helpers import _determine_default_project as _base_default_project from google.cloud.client import ClientWithProject +from google.cloud.datastore import __version__ from google.cloud.datastore import helpers from google.cloud.datastore._http import HTTPDatastoreAPI from google.cloud.datastore.batch import Batch @@ -32,10 +33,19 @@ try: from google.cloud.datastore._gapic import make_datastore_api - _HAVE_GRPC = True except ImportError: # pragma: NO COVER + from google.api_core import client_info + make_datastore_api = None _HAVE_GRPC = False + _CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__) +else: + from google.api_core.gapic_v1 import client_info + + _HAVE_GRPC = True + _CLIENT_INFO = client_info.ClientInfo( + client_library_version=__version__, gapic_version=__version__ + ) _MAX_LOOPS = 128 @@ -43,6 +53,7 @@ _DATASTORE_BASE_URL = "https://datastore.googleapis.com" """Datastore API request URL base.""" + _USE_GRPC = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) @@ -182,6 +193,14 @@ class Client(ClientWithProject): passed), falls back to the default inferred from the environment. 
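To ground the header plumbing above: `ClientInfo.to_user_agent()` is an existing `api_core` method that renders the metrics string `_request` now sends as both `User-Agent` and the client-info header. A small sketch; the exact output depends on the installed Python/gRPC/api_core versions:

```python
# Render the metrics string used for the request headers shown above.
from google.api_core.client_info import ClientInfo

info = ClientInfo(client_library_version="1.8.0")
print(info.to_user_agent())
# e.g. "gl-python/3.6.8 grpc/1.20.1 gax/1.11.0 gccl/1.8.0"
```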
+ :type client_info: :class:`google.api_core.gapic_v1.client_info.ClientInfo` + or :class:`google.api_core.client_info.ClientInfo` + :param client_info: (Optional) The client info used to send a user-agent + string along with API requests. If ``None``, then + default info will be used. Generally, + you only need to set this if you're developing your + own library or partner tool. + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as @@ -204,12 +223,19 @@ class Client(ClientWithProject): """The scopes required for authenticating as a Cloud Datastore consumer.""" def __init__( - self, project=None, namespace=None, credentials=None, _http=None, _use_grpc=None + self, + project=None, + namespace=None, + credentials=None, + client_info=_CLIENT_INFO, + _http=None, + _use_grpc=None, ): super(Client, self).__init__( project=project, credentials=credentials, _http=_http ) self.namespace = namespace + self._client_info = client_info self._batch_stack = _LocalStack() self._datastore_api_internal = None if _use_grpc is None: diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py index ef359d4068e5..c404dc79c109 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gapic.py +++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py @@ -41,7 +41,8 @@ def test_live_api(self, make_chan, mock_klass): client = mock.Mock( _base_url=base_url, _credentials=mock.sentinel.credentials, - spec=["_base_url", "_credentials"], + _client_info=mock.sentinel.client_info, + spec=["_base_url", "_credentials", "_client_info"], ) ds_api = self._call_fut(client) self.assertIs(ds_api, mock.sentinel.ds_client) @@ -52,7 +53,7 @@ def test_live_api(self, make_chan, mock_klass): "datastore.googleapis.com:443", ) mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, client_info=mock.ANY + channel=mock.sentinel.channel, client_info=mock.sentinel.client_info ) @mock.patch( @@ -70,12 +71,13 @@ def test_emulator(self, make_chan, mock_klass): client = mock.Mock( _base_url=base_url, _credentials=mock.sentinel.credentials, - spec=["_base_url", "_credentials"], + _client_info=mock.sentinel.client_info, + spec=["_base_url", "_credentials", "_client_info"], ) ds_api = self._call_fut(client) self.assertIs(ds_api, mock.sentinel.ds_client) make_chan.assert_called_once_with(host) mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, client_info=mock.ANY + channel=mock.sentinel.channel, client_info=mock.sentinel.client_info ) diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index b402eafc9532..b332c946d40a 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -29,26 +29,27 @@ def _call_fut(*args, **kwargs): def test_success(self): from google.cloud import _http as connection_module - from google.cloud.datastore._http import _CLIENT_INFO project = "PROJECT" method = "METHOD" data = b"DATA" base_url = "http://api-url" + user_agent = "USER AGENT" + client_info = _make_client_info(user_agent) response_data = "CONTENT" http = _make_requests_session([_make_response(content=response_data)]) # Call actual function under test. 
- response = self._call_fut(http, project, method, data, base_url) + response = self._call_fut(http, project, method, data, base_url, client_info) self.assertEqual(response, response_data) # Check that the mocks were called as expected. expected_url = _build_expected_url(base_url, project, method) expected_headers = { "Content-Type": "application/x-protobuf", - "User-Agent": connection_module.DEFAULT_USER_AGENT, - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + "User-Agent": user_agent, + connection_module.CLIENT_INFO_HEADER: user_agent, } http.request.assert_called_once_with( method="POST", url=expected_url, headers=expected_headers, data=data @@ -63,6 +64,8 @@ def test_failure(self): method = "METHOD" data = "DATA" uri = "http://api-url" + user_agent = "USER AGENT" + client_info = _make_client_info(user_agent) error = status_pb2.Status() error.message = "Entity value is indexed." @@ -73,7 +76,7 @@ def test_failure(self): ) with self.assertRaises(BadRequest) as exc: - self._call_fut(http, project, method, data, uri) + self._call_fut(http, project, method, data, uri, client_info) expected_message = "400 Entity value is indexed." self.assertEqual(str(exc.exception), expected_message) @@ -93,6 +96,7 @@ def test_it(self): project = "projectOK" method = "beginTransaction" base_url = "test.invalid" + client_info = _make_client_info() request_pb = datastore_pb2.BeginTransactionRequest(project_id=project) response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc") @@ -106,13 +110,19 @@ def test_it(self): project, method, base_url, + client_info, request_pb, datastore_pb2.BeginTransactionResponse, ) self.assertEqual(result, response_pb) mock_request.assert_called_once_with( - http, project, method, request_pb.SerializeToString(), base_url + http, + project, + method, + request_pb.SerializeToString(), + base_url, + client_info, ) @@ -149,8 +159,12 @@ def test_lookup_single_key_empty_response(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -182,8 +196,12 @@ def test_lookup_single_key_empty_response_w_eventual(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -214,8 +232,12 @@ def test_lookup_single_key_empty_response_w_transaction(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. 
@@ -249,8 +271,12 @@ def test_lookup_single_key_nonempty_response(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -284,8 +310,12 @@ def test_lookup_multiple_keys_empty_response(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -320,8 +350,12 @@ def test_lookup_multiple_keys_w_missing(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -355,8 +389,12 @@ def test_lookup_multiple_keys_w_deferred(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -399,8 +437,12 @@ def test_run_query_w_eventual_no_transaction(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -440,8 +482,12 @@ def test_run_query_wo_eventual_w_transaction(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -480,8 +526,12 @@ def test_run_query_wo_namespace_empty_result(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -522,8 +572,12 @@ def test_run_query_w_namespace_nonempty_result(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. 
@@ -550,8 +604,12 @@ def test_begin_transaction(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -586,8 +644,12 @@ def test_commit_wo_transaction(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -623,8 +685,12 @@ def test_commit_w_transaction(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -653,8 +719,12 @@ def test_rollback_ok(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -678,8 +748,12 @@ def test_allocate_ids_empty(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. @@ -711,8 +785,12 @@ def test_allocate_ids_non_empty(self): http = _make_requests_session( [_make_response(content=rsp_pb.SerializeToString())] ) + client_info = _make_client_info() client = mock.Mock( - _http=http, _base_url="test.invalid", spec=["_http", "_base_url"] + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], ) # Make request. 
@@ -760,14 +838,24 @@ def _make_key_pb(project, id_=1234): return Key(*path_args, project=project).to_protobuf() +_USER_AGENT = "TESTING USER AGENT" + + +def _make_client_info(user_agent=_USER_AGENT): + from google.api_core.client_info import ClientInfo + + client_info = mock.create_autospec(ClientInfo) + client_info.to_user_agent.return_value = user_agent + return client_info + + def _verify_protobuf_call(http, expected_url, pb): from google.cloud import _http as connection_module - from google.cloud.datastore._http import _CLIENT_INFO expected_headers = { "Content-Type": "application/x-protobuf", - "User-Agent": connection_module.DEFAULT_USER_AGENT, - connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO, + "User-Agent": _USER_AGENT, + connection_module.CLIENT_INFO_HEADER: _USER_AGENT, } http.request.assert_called_once_with( diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 10294db3cf62..05c6f4ddbfd5 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -127,6 +127,7 @@ def _make_one( project=PROJECT, namespace=None, credentials=None, + client_info=None, _http=None, _use_grpc=None, ): @@ -134,6 +135,7 @@ def _make_one( project=project, namespace=namespace, credentials=credentials, + client_info=client_info, _http=_http, _use_grpc=_use_grpc, ) @@ -148,6 +150,7 @@ def test_constructor_w_project_no_environ(self): self.assertRaises(EnvironmentError, self._make_one, None) def test_constructor_w_implicit_inputs(self): + from google.cloud.datastore.client import _CLIENT_INFO from google.cloud.datastore.client import _DATASTORE_BASE_URL other = "other" @@ -167,6 +170,7 @@ def test_constructor_w_implicit_inputs(self): self.assertEqual(client.project, other) self.assertIsNone(client.namespace) self.assertIs(client._credentials, creds) + self.assertIs(client._client_info, _CLIENT_INFO) self.assertIsNone(client._http_internal) self.assertEqual(client.base_url, _DATASTORE_BASE_URL) @@ -182,13 +186,19 @@ def test_constructor_w_explicit_inputs(self): other = "other" namespace = "namespace" creds = _make_credentials() + client_info = mock.Mock() http = object() client = self._make_one( - project=other, namespace=namespace, credentials=creds, _http=http + project=other, + namespace=namespace, + credentials=creds, + client_info=client_info, + _http=http, ) self.assertEqual(client.project, other) self.assertEqual(client.namespace, namespace) self.assertIs(client._credentials, creds) + self.assertIs(client._client_info, client_info) self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) self.assertEqual(list(client._batch_stack), []) @@ -232,10 +242,29 @@ def test_constructor_gcd_host(self): client = self._make_one(project=project, credentials=creds, _http=http) self.assertEqual(client.base_url, "http://" + host) + def test_base_url_property(self): + alternate_url = "https://alias.example.com/" + project = "PROJECT" + creds = _make_credentials() + http = object() + + client = self._make_one(project=project, credentials=creds, _http=http) + client.base_url = alternate_url + self.assertEqual(client.base_url, alternate_url) + + def test__datastore_api_property_already_set(self): + client = self._make_one( + project="prahj-ekt", credentials=_make_credentials(), _use_grpc=True + ) + already = client._datastore_api_internal = object() + self.assertIs(client._datastore_api, already) + def 
test__datastore_api_property_gapic(self): + client_info = mock.Mock() client = self._make_one( project="prahj-ekt", credentials=_make_credentials(), + client_info=client_info, _http=object(), _use_grpc=True, ) @@ -247,41 +276,32 @@ def test__datastore_api_property_gapic(self): ) with patch as make_api: ds_api = client._datastore_api - self.assertIs(ds_api, mock.sentinel.ds_api) - make_api.assert_called_once_with(client) - self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api) - # Make sure the cached value is used. - self.assertEqual(make_api.call_count, 1) - self.assertIs(client._datastore_api, mock.sentinel.ds_api) - self.assertEqual(make_api.call_count, 1) - - def test_base_url_property(self): - alternate_url = "https://alias.example.com/" - project = "PROJECT" - creds = _make_credentials() - http = object() - client = self._make_one(project=project, credentials=creds, _http=http) - client.base_url = alternate_url - self.assertEqual(client.base_url, alternate_url) + self.assertIs(ds_api, mock.sentinel.ds_api) + self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api) + make_api.assert_called_once_with(client) def test__datastore_api_property_http(self): - from google.cloud.datastore._http import HTTPDatastoreAPI - + client_info = mock.Mock() client = self._make_one( project="prahj-ekt", credentials=_make_credentials(), + client_info=client_info, _http=object(), _use_grpc=False, ) self.assertIsNone(client._datastore_api_internal) - ds_api = client._datastore_api - self.assertIsInstance(ds_api, HTTPDatastoreAPI) - self.assertIs(ds_api.client, client) - # Make sure the cached value is used. - self.assertIs(client._datastore_api_internal, ds_api) - self.assertIs(client._datastore_api, ds_api) + patch = mock.patch( + "google.cloud.datastore.client.HTTPDatastoreAPI", + return_value=mock.sentinel.ds_api, + ) + with patch as make_api: + ds_api = client._datastore_api + + self.assertIs(ds_api, mock.sentinel.ds_api) + self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api) + make_api.assert_called_once_with(client) def test__push_batch_and__pop_batch(self): creds = _make_credentials() From d10c73758dab3ba6c30dce42ee34252d7dad5fb5 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 17 May 2019 12:59:14 -0400 Subject: [PATCH 223/611] Release datastore-1.8.0 (#8020) --- packages/google-cloud-datastore/CHANGELOG.md | 26 ++++++++++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index c7f1161078fb..549fd9abf451 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,32 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.8.0 + +05-17-2019 08:28 PDT + +### Implementation Changes +- Add routing header to method metadata (via synth). ([#7593](https://github.com/googleapis/google-cloud-python/pull/7593)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) + +### New Features +- Add `client_info` support to client. ([#8013](https://github.com/googleapis/google-cloud-python/pull/8013)) + +### Dependencies +- Pin `google-cloud-core >= 1.0.0, < 2.0dev`. ([#7993](https://github.com/googleapis/google-cloud-python/pull/7993)) + +### Documentation +- Update client library documentation URLs. 
([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Pick up stub docstring fix in GAPIC generator. ([#6968](https://github.com/googleapis/google-cloud-python/pull/6968)) + +### Internal / Testing Changes +- Add nox session `docs` (via synth). ([#7768](https://github.com/googleapis/google-cloud-python/pull/7768)) +- Copy lintified proto files (via synth). ([#7446](https://github.com/googleapis/google-cloud-python/pull/7446)) +- Add clarifying comment to blacken nox target. ([#7389](https://github.com/googleapis/google-cloud-python/pull/7389)) +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) +- Update copyright headers ([#7142](https://github.com/googleapis/google-cloud-python/pull/7142)) +- Protoc-generated serialization update. ([#7080](https://github.com/googleapis/google-cloud-python/pull/7080)) + ## 1.7.3 12-17-2018 16:45 PST diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index b4ea2447bde7..4b505e6abe48 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-datastore' description = 'Google Cloud Datastore API client library' -version = '1.7.3' +version = '1.8.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From b159047fba54cfabfd7bd779cd3564a800a7c59c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 21 May 2019 09:14:01 -0700 Subject: [PATCH 224/611] [CHANGE ME] Re-generated datastore to pick up changes in the API or client library generator. (#8055) --- .../cloud/datastore_v1/gapic/datastore_client.py | 2 ++ .../google/cloud/datastore_v1/gapic/enums.py | 1 + .../gapic/transports/datastore_grpc_transport.py | 1 + packages/google-cloud-datastore/synth.metadata | 10 +++++----- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index b122b8631125..8b82fbac6457 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Accesses the google.datastore.v1 Datastore API.""" import pkg_resources @@ -35,6 +36,7 @@ from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore_v1.proto import query_pb2 + _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( "google-cloud-datastore" ).version diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index 8f8b7367b19c..b56d0fd39269 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ """Wrappers for protocol buffer enum types.""" import enum diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 4563664ed6db..10f89a7bd485 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import google.api_core.grpc_helpers from google.cloud.datastore_v1.proto import datastore_pb2_grpc diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index bb81d3a995f1..19a925ef3203 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-05-10T12:20:07.392263Z", + "updateTime": "2019-05-21T12:17:38.954511Z", "sources": [ { "generator": { "name": "artman", - "version": "0.19.0", - "dockerImage": "googleapis/artman@sha256:d3df563538225ac6caac45d8ad86499500211d1bcb2536955a6dbda15e1b368e" + "version": "0.20.0", + "dockerImage": "googleapis/artman@sha256:3246adac900f4bdbd62920e80de2e5877380e44036b3feae13667ec255ebf5ec" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "07883be5bf3c3233095e99d8e92b8094f5d7084a", - "internalRef": "247530843" + "sha": "32a10f69e2c9ce15bba13ab1ff928bacebb25160", + "internalRef": "249058354" } }, { From d8f8ecf850f6cb773b5775f60d04bc030a619662 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 24 May 2019 08:20:54 -0700 Subject: [PATCH 225/611] Blacken noxfile.py, setup.py (via synth). (#8120) --- packages/google-cloud-datastore/noxfile.py | 46 +++++++------- packages/google-cloud-datastore/setup.py | 61 +++++++++---------- .../google-cloud-datastore/synth.metadata | 6 +- 3 files changed, 55 insertions(+), 58 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 0f528b7f3902..f021e0290c80 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -23,6 +23,12 @@ LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +if os.path.exists("samples"): + BLACK_PATHS.append("samples") + + @nox.session(python="3.7") def lint(session): """Run linters. @@ -31,13 +37,7 @@ def lint(session): serious code quality issues. """ session.install("flake8", "black", *LOCAL_DEPS) - session.run( - "black", - "--check", - "google", - "tests", - "docs", - ) + session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -52,12 +52,7 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" session.install("black") - session.run( - "black", - "google", - "tests", - "docs", - ) + session.run("black", *BLACK_PATHS) @nox.session(python="3.7") @@ -140,21 +135,24 @@ def cover(session): session.run("coverage", "erase") + @nox.session(python="3.7") def docs(session): """Build the docs for this library.""" - session.install('-e', '.') - session.install('sphinx', 'alabaster', 'recommonmark') + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") - shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'sphinx-build', - '-W', # warnings as errors - '-T', # show full traceback on exception - '-N', # no colors - '-b', 'html', - '-d', os.path.join('docs', '_build', 'doctrees', ''), - os.path.join('docs', ''), - os.path.join('docs', '_build', 'html', ''), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 4b505e6abe48..d02178d4626d 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -20,40 +20,39 @@ # Package metadata. -name = 'google-cloud-datastore' -description = 'Google Cloud Datastore API client library' -version = '1.8.0' +name = "google-cloud-datastore" +description = "Google Cloud Datastore API client library" +version = "1.8.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' -release_status = 'Development Status :: 5 - Production/Stable' +release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', + "google-api-core[grpc] >= 1.6.0, < 2.0.0dev", "google-cloud-core >= 1.0.0, < 2.0dev", ] -extras = { -} +extras = {} # Setup boilerplate below this line. package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() - if package.startswith('google')] + package for package in setuptools.find_packages() if package.startswith("google") +] # Determine which namespaces are needed. 
-namespaces = ['google'] -if 'google.cloud' in packages: - namespaces.append('google.cloud') +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") setuptools.setup( @@ -61,30 +60,30 @@ version=version, description=description, long_description=readme, - author='Google LLC', - author_email='googleapis-packages@google.com', - license='Apache 2.0', - url='https://github.com/GoogleCloudPlatform/google-cloud-python', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/GoogleCloudPlatform/google-cloud-python", classifiers=[ release_status, - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Operating System :: OS Independent', - 'Topic :: Internet', + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: OS Independent", + "Topic :: Internet", ], - platforms='Posix; MacOS X; Windows', + platforms="Posix; MacOS X; Windows", packages=packages, namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 19a925ef3203..b03e40522f8f 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-05-21T12:17:38.954511Z", + "updateTime": "2019-05-24T12:17:24.704309Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "32a10f69e2c9ce15bba13ab1ff928bacebb25160", - "internalRef": "249058354" + "sha": "0537189470f04f24836d6959821c24197a0ed120", + "internalRef": "249742806" } }, { From 291506520eef6bdc33284904fe80618d5ff221e4 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Tue, 4 Jun 2019 22:17:24 +0530 Subject: [PATCH 226/611] Add 'Client.reserve_ids' API wrapper. (#8178) --- .../google/cloud/datastore/client.py | 27 ++++++++++++++ .../tests/unit/test_client.py | 36 +++++++++++++++++-- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 9cf892aab54d..e0b034fbc9c7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -648,3 +648,30 @@ def do_something(entity): if "namespace" not in kwargs: kwargs["namespace"] = self.namespace return Query(self, **kwargs) + + def reserve_ids(self, complete_key, num_ids): + """Reserve a list of IDs from a complete key. 
+
+        :type complete_key: :class:`google.cloud.datastore.key.Key`
+        :param complete_key: Complete key for which to reserve IDs.
+
+        :type num_ids: int
+        :param num_ids: The number of IDs to reserve.
+
+        :rtype: :class:`NoneType`
+        :returns: None
+        :raises: :class:`ValueError` if ``complete_key`` is not a
+            complete key.
+        """
+        if complete_key.is_partial:
+            raise ValueError(("Key is not Complete.", complete_key))
+
+        if not isinstance(num_ids, int):
+            raise ValueError(("num_ids is not a valid integer.", num_ids))
+
+        complete_key_pb = complete_key.to_protobuf()
+        complete_key_pbs = [complete_key_pb] * num_ids
+
+        self._datastore_api.reserve_ids(complete_key.project, complete_key_pbs)
+
+        return None
diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py
index 05c6f4ddbfd5..171a93eda4db 100644
--- a/packages/google-cloud-datastore/tests/unit/test_client.py
+++ b/packages/google-cloud-datastore/tests/unit/test_client.py
@@ -860,12 +860,42 @@ def test_allocate_ids_w_partial_key(self):
         # Check the IDs returned.
         self.assertEqual([key._id for key in result], list(range(num_ids)))
 
-    def test_allocate_ids_with_completed_key(self):
+    def test_allocate_ids_w_completed_key(self):
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
 
-        COMPLETE_KEY = _Key(self.PROJECT)
-        self.assertRaises(ValueError, client.allocate_ids, COMPLETE_KEY, 2)
+        complete_key = _Key(self.PROJECT)
+        self.assertRaises(ValueError, client.allocate_ids, complete_key, 2)
+
+    def test_reserve_ids_w_completed_key(self):
+        num_ids = 2
+        creds = _make_credentials()
+        client = self._make_one(credentials=creds, _use_grpc=False)
+        complete_key = _Key(self.PROJECT)
+        reserve_ids = mock.Mock()
+        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+        client._datastore_api_internal = ds_api
+        self.assertTrue(not complete_key.is_partial)
+        client.reserve_ids(complete_key, num_ids)
+        expected_keys = [complete_key.to_protobuf()] * num_ids
+        reserve_ids.assert_called_once_with(self.PROJECT, expected_keys)
+
+    def test_reserve_ids_w_partial_key(self):
+        num_ids = 2
+        incomplete_key = _Key(self.PROJECT)
+        incomplete_key._id = None
+        creds = _make_credentials()
+        client = self._make_one(credentials=creds)
+        with self.assertRaises(ValueError):
+            client.reserve_ids(incomplete_key, num_ids)
+
+    def test_reserve_ids_w_wrong_num_ids(self):
+        num_ids = "2"
+        complete_key = _Key(self.PROJECT)
+        creds = _make_credentials()
+        client = self._make_one(credentials=creds)
+        with self.assertRaises(ValueError):
+            client.reserve_ids(complete_key, num_ids)
 
     def test_key_w_project(self):
         KIND = "KIND"

From e7215b9a4738ff39dc0dc4352307e7013a411926 Mon Sep 17 00:00:00 2001
From: Yoshi Automation Bot
Date: Thu, 6 Jun 2019 09:25:35 -0700
Subject: [PATCH 227/611] [CHANGE ME] Re-generated datastore to pick up changes
 in the API or client library generator. (#8238)

---
 packages/google-cloud-datastore/noxfile.py     |  4 ++--
 packages/google-cloud-datastore/synth.metadata | 10 +++++-----
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py
index f021e0290c80..968fb5a09bf1 100644
--- a/packages/google-cloud-datastore/noxfile.py
+++ b/packages/google-cloud-datastore/noxfile.py
@@ -46,7 +46,7 @@ def blacken(session):
     """Run black.
 
     Format code to uniform standard.
-    
+
     This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
That run uses an image that doesn't have 3.6 installed. Before updating this check the state of the `gcp_ubuntu_config` we use for that Kokoro run. @@ -78,7 +78,7 @@ def default(session): "--cov-append", "--cov-config=.coveragerc", "--cov-report=", - "--cov-fail-under=97", + "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index b03e40522f8f..fa9cb33c673b 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-05-24T12:17:24.704309Z", + "updateTime": "2019-06-06T12:17:58.765389Z", "sources": [ { "generator": { "name": "artman", - "version": "0.20.0", - "dockerImage": "googleapis/artman@sha256:3246adac900f4bdbd62920e80de2e5877380e44036b3feae13667ec255ebf5ec" + "version": "0.23.1", + "dockerImage": "googleapis/artman@sha256:9d5cae1454da64ac3a87028f8ef486b04889e351c83bb95e83b8fab3959faed0" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0537189470f04f24836d6959821c24197a0ed120", - "internalRef": "249742806" + "sha": "f03bf2139ee85aac88411d6c20a21f4c901fe83c", + "internalRef": "251806891" } }, { From aeb9dabdd7f44359f275353e6c0ccdf2f1d8e134 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 14 Jun 2019 09:46:54 -0700 Subject: [PATCH 228/611] Add disclaimer to auto-generated template files (via synth). (#8312) --- packages/google-cloud-datastore/.coveragerc | 1 + packages/google-cloud-datastore/.flake8 | 1 + packages/google-cloud-datastore/noxfile.py | 2 ++ packages/google-cloud-datastore/setup.cfg | 1 + packages/google-cloud-datastore/synth.metadata | 10 +++++----- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index 6b9ab9da4a1b..b178b094aa1d 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! [run] branch = True diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 61766fa84d02..0268ecc9c55c 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 exclude = diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 968fb5a09bf1..f6257317fccd 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Generated by synthtool. DO NOT EDIT! + from __future__ import absolute_import import os import shutil diff --git a/packages/google-cloud-datastore/setup.cfg b/packages/google-cloud-datastore/setup.cfg index 2a9acf13daa9..3bd555500e37 100644 --- a/packages/google-cloud-datastore/setup.cfg +++ b/packages/google-cloud-datastore/setup.cfg @@ -1,2 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! 
[bdist_wheel] universal = 1 diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index fa9cb33c673b..1a1af9eb63a3 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-06T12:17:58.765389Z", + "updateTime": "2019-06-14T12:17:47.466538Z", "sources": [ { "generator": { "name": "artman", - "version": "0.23.1", - "dockerImage": "googleapis/artman@sha256:9d5cae1454da64ac3a87028f8ef486b04889e351c83bb95e83b8fab3959faed0" + "version": "0.25.0", + "dockerImage": "googleapis/artman@sha256:ef1a98ab1e2b8f05f4d9a56f27d63347aefe14020e5f2d585172b14ca76f1d90" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f03bf2139ee85aac88411d6c20a21f4c901fe83c", - "internalRef": "251806891" + "sha": "c23b68eecb00c4d285a730a49b1d7d943cd56183", + "internalRef": "253113405" } }, { From 43b260dba5410167961fe6e965cccd6221536bd5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 17 Jun 2019 09:36:20 -0700 Subject: [PATCH 229/611] Declare encoding as utf-8 in pb2 files (via synth). (#8350) --- .../google/cloud/datastore_v1/proto/datastore_pb2.py | 1 + .../google/cloud/datastore_v1/proto/entity_pb2.py | 1 + .../google/cloud/datastore_v1/proto/query_pb2.py | 1 + packages/google-cloud-datastore/synth.metadata | 10 +++++----- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index 1d807b47c81a..c62dea6375e0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/datastore.proto diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py index b3f2b51d5031..00ee58deadaf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/entity.proto diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py index 31a4ba4854e0..50a68509948a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/datastore_v1/proto/query.proto diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 1a1af9eb63a3..db38b23004f3 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-14T12:17:47.466538Z", + "updateTime": "2019-06-15T12:17:21.326442Z", "sources": [ { "generator": { "name": "artman", - "version": "0.25.0", - "dockerImage": "googleapis/artman@sha256:ef1a98ab1e2b8f05f4d9a56f27d63347aefe14020e5f2d585172b14ca76f1d90" + "version": "0.26.0", + "dockerImage": "googleapis/artman@sha256:6db0735b0d3beec5b887153a2a7c7411fc7bb53f73f6f389a822096bd14a3a15" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c23b68eecb00c4d285a730a49b1d7d943cd56183", - "internalRef": "253113405" + "sha": "7b58b37559f6a5337c4c564518e9573d742df225", + "internalRef": "253322136" } }, { From 71f1ca6ef2bfc2a3c7a5895be070ac1f8d476c7a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 18 Jun 2019 10:50:24 -0700 Subject: [PATCH 230/611] [CHANGE ME] Re-generated datastore to pick up changes in the API or client library generator. (#8388) --- .../gapic/transports/datastore_grpc_transport.py | 8 ++++++-- packages/google-cloud-datastore/synth.metadata | 10 +++++----- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 10f89a7bd485..026bca5c0ae8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -70,7 +70,9 @@ def __init__( self._stubs = {"datastore_stub": datastore_pb2_grpc.DatastoreStub(channel)} @classmethod - def create_channel(cls, address="datastore.googleapis.com:443", credentials=None): + def create_channel( + cls, address="datastore.googleapis.com:443", credentials=None, **kwargs + ): """Create and return a gRPC channel object. Args: @@ -80,12 +82,14 @@ def create_channel(cls, address="datastore.googleapis.com:443", credentials=None credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. Returns: grpc.Channel: A gRPC channel object. 
""" return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index db38b23004f3..e9ed7da19f6b 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-15T12:17:21.326442Z", + "updateTime": "2019-06-18T12:16:20.245676Z", "sources": [ { "generator": { "name": "artman", - "version": "0.26.0", - "dockerImage": "googleapis/artman@sha256:6db0735b0d3beec5b887153a2a7c7411fc7bb53f73f6f389a822096bd14a3a15" + "version": "0.27.0", + "dockerImage": "googleapis/artman@sha256:b036a7f4278d9deb5796f065e5c7f608d47d75369985ca7ab5039998120e972d" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7b58b37559f6a5337c4c564518e9573d742df225", - "internalRef": "253322136" + "sha": "384aa843867c4d17756d14a01f047b6368494d32", + "internalRef": "253675319" } }, { From d9ae5123fb24307624b9586f1823c6da9622dd01 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 25 Jun 2019 12:44:16 -0700 Subject: [PATCH 231/611] All: Add docs job to publish to googleapis.dev. (#8464) --- packages/google-cloud-datastore/.repo-metadata.json | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 packages/google-cloud-datastore/.repo-metadata.json diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json new file mode 100644 index 000000000000..f28b9d74ef7c --- /dev/null +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -0,0 +1,12 @@ +{ + "name": "datastore", + "name_pretty": "Google Cloud Datastore", + "product_documentation": "https://cloud.google.com/datastore", + "client_documentation": "https://googleapis.dev/python/datastore/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", + "release_level": "ga", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-datastore", + "api_id": "datastore.googleapis.com" +} \ No newline at end of file From 5fb582c3dc49709cd9fffc419b40a0306dbc13b9 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 28 Jun 2019 09:17:57 -0700 Subject: [PATCH 232/611] [CHANGE ME] Re-generated datastore to pick up changes in the API or client library generator. (#8506) --- .../datastore_v1/gapic/datastore_client.py | 17 ++++++++++++++++- packages/google-cloud-datastore/synth.metadata | 10 +++++----- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 8b82fbac6457..8a4f084d8970 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -20,6 +20,7 @@ import warnings from google.oauth2 import service_account +import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method @@ -86,6 +87,7 @@ def __init__( credentials=None, client_config=None, client_info=None, + client_options=None, ): """Constructor. 
@@ -116,6 +118,9 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. """ # Raise deprecation warnings for things we want to go away. if client_config is not None: @@ -134,6 +139,15 @@ def __init__( stacklevel=2, ) + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + # Instantiate the transport. # The transport is responsible for handling serialization and # deserialization and actually sending data to the service. @@ -142,6 +156,7 @@ def __init__( self.transport = transport( credentials=credentials, default_class=datastore_grpc_transport.DatastoreGrpcTransport, + address=api_endpoint, ) else: if credentials: @@ -152,7 +167,7 @@ def __init__( self.transport = transport else: self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index e9ed7da19f6b..06a6e65cca9c 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-18T12:16:20.245676Z", + "updateTime": "2019-06-28T12:20:30.444808Z", "sources": [ { "generator": { "name": "artman", - "version": "0.27.0", - "dockerImage": "googleapis/artman@sha256:b036a7f4278d9deb5796f065e5c7f608d47d75369985ca7ab5039998120e972d" + "version": "0.29.2", + "dockerImage": "googleapis/artman@sha256:45263333b058a4b3c26a8b7680a2710f43eae3d250f791a6cb66423991dcb2df" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "384aa843867c4d17756d14a01f047b6368494d32", - "internalRef": "253675319" + "sha": "84c8ad4e52f8eec8f08a60636cfa597b86969b5c", + "internalRef": "255474859" } }, { From cdba0e18c5a6d5c4b5ea87c354520da000f14570 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 1 Jul 2019 13:07:24 -0700 Subject: [PATCH 233/611] Fixes #8545 by removing typing information for kwargs to not conflict with type checkers (#8546) --- .../google-cloud-datastore/google/cloud/datastore/client.py | 2 -- packages/google-cloud-datastore/google/cloud/datastore/key.py | 1 - 2 files changed, 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index e0b034fbc9c7..df9ce33a0bdf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -560,7 +560,6 @@ def batch(self): def transaction(self, **kwargs): """Proxy to :class:`google.cloud.datastore.transaction.Transaction`. - :type kwargs: dict :param kwargs: Keyword arguments to be passed in. 
""" return Transaction(self, **kwargs) @@ -633,7 +632,6 @@ def do_something(entity): >>> query_iter.next_page_token is None True - :type kwargs: dict :param kwargs: Parameters for initializing and instance of :class:`~google.cloud.datastore.query.Query`. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index f537e9d0fd69..c988eebd80af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -92,7 +92,6 @@ class Key(object): :param path_args: May represent a partial (odd length) or full (even length) key path. - :type kwargs: dict :param kwargs: Keyword arguments to be passed in. Accepted keyword arguments are From c330fa488e25f12a0d7a1c95fd9d2646ab69c4cb Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 3 Jul 2019 10:47:10 -0700 Subject: [PATCH 234/611] [CHANGE ME] Re-generated datastore to pick up changes in the API or client library generator. (#8580) --- packages/google-cloud-datastore/noxfile.py | 6 +++--- packages/google-cloud-datastore/synth.metadata | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index f6257317fccd..a2eefbb6765f 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -24,7 +24,7 @@ LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) - +BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] if os.path.exists("samples"): @@ -38,7 +38,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", "black", *LOCAL_DEPS) + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -53,7 +53,7 @@ def blacken(session): That run uses an image that doesn't have 3.6 installed. Before updating this check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ - session.install("black") + session.install(BLACK_VERSION) session.run("black", *BLACK_PATHS) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 06a6e65cca9c..1ddfbade41f8 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-28T12:20:30.444808Z", + "updateTime": "2019-07-03T12:24:06.203156Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.2", - "dockerImage": "googleapis/artman@sha256:45263333b058a4b3c26a8b7680a2710f43eae3d250f791a6cb66423991dcb2df" + "version": "0.29.3", + "dockerImage": "googleapis/artman@sha256:8900f94a81adaab0238965aa8a7b3648791f4f3a95ee65adc6a56cfcc3753101" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "84c8ad4e52f8eec8f08a60636cfa597b86969b5c", - "internalRef": "255474859" + "sha": "69916b6ffbb7717fa009033351777d0c9909fb79", + "internalRef": "256241904" } }, { From 51bf79d2bfa1eada6d15a0a197326ef2be5228a4 Mon Sep 17 00:00:00 2001 From: ylil93 Date: Mon, 15 Jul 2019 12:12:29 -0700 Subject: [PATCH 235/611] Add compatibility check badges to READMEs. 
(#8288) --- packages/google-cloud-datastore/README.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index ed998982d3a6..c5d9748dc508 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Datastore ======================================== -|GA| |pypi| |versions| +|GA| |pypi| |versions| |compat_check_pypi| |compat_check_github| `Google Cloud Datastore API`_ is a fully managed, schemaless database for storing non-relational data. Cloud Datastore automatically scales with your @@ -18,6 +18,10 @@ all other queries. :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ +.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=google-cloud-datastore + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=google-cloud-datastore +.. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Ddatastore + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Ddatastore .. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs .. _Product Documentation: https://cloud.google.com/datastore/docs .. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/datastore/index.html From 5f888be5020ca4572c5512609a86a14692051ef1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 19 Jul 2019 13:31:47 -0400 Subject: [PATCH 236/611] Bump minimum version for google-api-core to 1.14.0. (#8709) --- packages/google-cloud-datastore/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index d02178d4626d..2860b1c67c44 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.6.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", "google-cloud-core >= 1.0.0, < 2.0dev", ] extras = {} From 1e31ab44f54fae93fad991ecde361c5da8df9c81 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 19 Jul 2019 14:45:47 -0700 Subject: [PATCH 237/611] Link to googleapis.dev documentation in READMEs. (#8705) --- packages/google-cloud-datastore/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index c5d9748dc508..c242ed96f8aa 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -24,7 +24,7 @@ all other queries. :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Ddatastore .. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs .. 
_Product Documentation: https://cloud.google.com/datastore/docs -.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/datastore/index.html +.. _Client Library Documentation: https://googleapis.dev/python/datastore/latest Quick Start ----------- @@ -39,7 +39,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore -.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ From 33a8d300ba5935c39e5039048a450d7f87b4d0fb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 26 Jul 2019 10:17:15 -0700 Subject: [PATCH 238/611] Release datastore 1.9.0 (#8751) --- packages/google-cloud-datastore/CHANGELOG.md | 29 ++++++++++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 549fd9abf451..621e31cd680b 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.9.0 + +07-24-2019 16:04 PDT + + +### Implementation Changes +- Allow kwargs to be passed to create_channel (via synth). ([#8388](https://github.com/googleapis/google-cloud-python/pull/8388)) + +### New Features +- Add 'client_options' support (via synth). ([#8506](https://github.com/googleapis/google-cloud-python/pull/8506)) +- Add 'Client.reserve_ids' API wrapper. ([#8178](https://github.com/googleapis/google-cloud-python/pull/8178)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) + +### Internal / Testing Changes +- Pin black version (via synth). ([#8580](https://github.com/googleapis/google-cloud-python/pull/8580)) +- Remove typing information for kwargs to not conflict with type checkers ([#8546](https://github.com/googleapis/google-cloud-python/pull/8546)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8350](https://github.com/googleapis/google-cloud-python/pull/8350)) +- Add disclaimer to auto-generated template files (via synth). ([#8312](https://github.com/googleapis/google-cloud-python/pull/8312)) +- Suppress checking 'cov-fail-under' in nox default session (via synth). ([#8238](https://github.com/googleapis/google-cloud-python/pull/8238)) +- Blacken noxfile.py, setup.py (via synth). ([#8120](https://github.com/googleapis/google-cloud-python/pull/8120)) +- Add empty lines (via synth). 
([#8055](https://github.com/googleapis/google-cloud-python/pull/8055)) + ## 1.8.0 05-17-2019 08:28 PDT diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 2860b1c67c44..7414c0ed29da 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.8.0" +version = "1.9.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 15317dd52e91d240c9993f986da174df0d4bfdbd Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 29 Jul 2019 12:53:23 -0700 Subject: [PATCH 239/611] Update intersphinx mapping for requests. (#8805) --- packages/google-cloud-datastore/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 7cad41e2e9e5..8f72f48c1be5 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -342,7 +342,7 @@ None, ), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://docs.python-requests.org/en/master/", None), + "requests": ("https://2.python-requests.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } From 6d706a0baa8dc4a565e21f9fd9b726c27f4d48a2 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 1 Aug 2019 19:55:33 +0200 Subject: [PATCH 240/611] Update docs for building datastore indexes. (#8707) Restore `index.yml` file removed in PR #3146. --- .../tests/system/index.yaml | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 packages/google-cloud-datastore/tests/system/index.yaml diff --git a/packages/google-cloud-datastore/tests/system/index.yaml b/packages/google-cloud-datastore/tests/system/index.yaml new file mode 100644 index 000000000000..5a2d2b1a8bc9 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/index.yaml @@ -0,0 +1,23 @@ +indexes: + +- kind: Character + ancestor: yes + properties: + - name: appearances + +- kind: Character + ancestor: yes + properties: + - name: alive + +- kind: Character + ancestor: yes + properties: + - name: family + - name: appearances + +- kind: Character + ancestor: yes + properties: + - name: name + - name: family From 3194d373fdfecb54ab0c0adc7a973bf2d1814a62 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 6 Aug 2019 09:23:51 -0700 Subject: [PATCH 241/611] Remove send/recv msg size limit (via synth). 
(#8952) --- .../datastore_v1/gapic/datastore_client.py | 28 +++++++++---------- .../transports/datastore_grpc_transport.py | 9 +++++- .../google-cloud-datastore/synth.metadata | 10 +++---- 3 files changed, 27 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 8a4f084d8970..12958c41a487 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -229,8 +229,8 @@ def lookup( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.ReadOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -327,8 +327,8 @@ def run_query( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.GqlQuery` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -412,8 +412,8 @@ def begin_transaction( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.TransactionOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -515,8 +515,8 @@ def commit( transaction identifier is returned by a call to ``Datastore.BeginTransaction``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -600,8 +600,8 @@ def rollback( transaction (bytes): The transaction identifier, returned by a call to ``Datastore.BeginTransaction``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
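The reworded `retry` docstrings above (the same rewording continues in the hunks below) describe the new default behavior; an explicit per-call override still looks roughly like this sketch, which assumes an existing `client`, `project_id`, and `keys` — none of which come from the patch:

```python
from google.api_core import retry as retries

# Hedged sketch: override the documented default retry policy and give
# each attempt its own timeout, per the GAPIC surface documented above.
response = client.lookup(
    project_id,
    keys,
    retry=retries.Retry(deadline=60.0),
    timeout=10.0,
)
```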
@@ -682,8 +682,8 @@ def allocate_ids( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Key` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -764,8 +764,8 @@ def reserve_ids( message :class:`~google.cloud.datastore_v1.types.Key` database_id (str): If not empty, the ID of the database against which to make the request. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index 026bca5c0ae8..c7c640c472e1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -61,7 +61,14 @@ def __init__( # Create the channel. if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) self._channel = channel diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 1ddfbade41f8..8bff1cea3c7c 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-07-03T12:24:06.203156Z", + "updateTime": "2019-08-06T12:22:10.473851Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.3", - "dockerImage": "googleapis/artman@sha256:8900f94a81adaab0238965aa8a7b3648791f4f3a95ee65adc6a56cfcc3753101" + "version": "0.32.1", + "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "69916b6ffbb7717fa009033351777d0c9909fb79", - "internalRef": "256241904" + "sha": "e699b0cba64ffddfae39633417180f1f65875896", + "internalRef": "261759677" } }, { From 6348bc140767988795a21b362c5c052ea9e2dd83 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 16 Aug 2019 13:25:32 -0700 Subject: [PATCH 242/611] Remove compatability badges from READMEs. 
(#9035) --- packages/google-cloud-datastore/README.rst | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index c242ed96f8aa..e613092a58bf 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Datastore ======================================== -|GA| |pypi| |versions| |compat_check_pypi| |compat_check_github| +|GA| |pypi| |versions| `Google Cloud Datastore API`_ is a fully managed, schemaless database for storing non-relational data. Cloud Datastore automatically scales with your @@ -18,10 +18,6 @@ all other queries. :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ -.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=google-cloud-datastore - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=google-cloud-datastore -.. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Ddatastore - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Ddatastore .. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs .. _Product Documentation: https://cloud.google.com/datastore/docs .. _Client Library Documentation: https://googleapis.dev/python/datastore/latest From b2c61eec6312f7b76ed7add47af055249a812bdb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 27 Aug 2019 16:35:22 -0700 Subject: [PATCH 243/611] Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. (#9085) --- packages/google-cloud-datastore/docs/conf.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 8f72f48c1be5..a4c30084718e 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -337,10 +337,7 @@ "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ( - "https://googleapis.github.io/google-cloud-python/latest", - None, - ), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), "requests": ("https://2.python-requests.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), From f8c157f4b58ae2961fddcf654c89a80d93590938 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar <46078689+Emar-Kar@users.noreply.github.com> Date: Thu, 5 Sep 2019 21:22:45 +0300 Subject: [PATCH 244/611] Datastore: Add `client_options` to constructors for manual clients. 
(#9055) * Add `client_options` to datastore --- .../google/cloud/datastore/client.py | 18 ++++++++- packages/google-cloud-datastore/setup.py | 2 +- .../tests/unit/test_client.py | 38 +++++++++++++++++-- 3 files changed, 53 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index df9ce33a0bdf..69bbc0342eaf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -15,6 +15,7 @@ import os +import google.api_core.client_options from google.cloud._helpers import _LocalStack from google.cloud._helpers import _determine_default_project as _base_default_project from google.cloud.client import ClientWithProject @@ -201,6 +202,11 @@ class Client(ClientWithProject): you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options on the + client. API Endpoint should be set through client_options. + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as @@ -228,6 +234,7 @@ def __init__( namespace=None, credentials=None, client_info=_CLIENT_INFO, + client_options=None, _http=None, _use_grpc=None, ): @@ -236,6 +243,7 @@ def __init__( ) self.namespace = namespace self._client_info = client_info + self._client_options = client_options self._batch_stack = _LocalStack() self._datastore_api_internal = None if _use_grpc is None: @@ -246,7 +254,15 @@ def __init__( host = os.environ[GCD_HOST] self._base_url = "http://" + host except KeyError: - self._base_url = _DATASTORE_BASE_URL + api_endpoint = _DATASTORE_BASE_URL + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + self._base_url = api_endpoint @staticmethod def _determine_default(project): diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 7414c0ed29da..06c15b0b8434 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = {} diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 171a93eda4db..5a7448fc8894 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -128,6 +128,7 @@ def _make_one( namespace=None, credentials=None, client_info=None, + client_options=None, _http=None, _use_grpc=None, ): @@ -136,6 +137,7 @@ def _make_one( namespace=namespace, credentials=credentials, client_info=client_info, + client_options=client_options, _http=_http, _use_grpc=_use_grpc, ) @@ -172,6 +174,7 @@ def test_constructor_w_implicit_inputs(self): self.assertIs(client._credentials, creds) self.assertIs(client._client_info, _CLIENT_INFO) self.assertIsNone(client._http_internal) + 
self.assertIsNone(client._client_options) self.assertEqual(client.base_url, _DATASTORE_BASE_URL) self.assertIsNone(client.current_batch) @@ -181,18 +184,20 @@ def test_constructor_w_implicit_inputs(self): _determine_default_project.assert_called_once_with(None) def test_constructor_w_explicit_inputs(self): - from google.cloud.datastore.client import _DATASTORE_BASE_URL + from google.api_core.client_options import ClientOptions other = "other" namespace = "namespace" creds = _make_credentials() client_info = mock.Mock() + client_options = ClientOptions("endpoint") http = object() client = self._make_one( project=other, namespace=namespace, credentials=creds, client_info=client_info, + client_options=client_options, _http=http, ) self.assertEqual(client.project, other) @@ -201,8 +206,8 @@ def test_constructor_w_explicit_inputs(self): self.assertIs(client._client_info, client_info) self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) + self.assertIs(client._base_url, "endpoint") self.assertEqual(list(client._batch_stack), []) - self.assertEqual(client.base_url, _DATASTORE_BASE_URL) def test_constructor_use_grpc_default(self): import google.cloud.datastore.client as MUT @@ -243,12 +248,39 @@ def test_constructor_gcd_host(self): self.assertEqual(client.base_url, "http://" + host) def test_base_url_property(self): + from google.cloud.datastore.client import _DATASTORE_BASE_URL + from google.api_core.client_options import ClientOptions + alternate_url = "https://alias.example.com/" project = "PROJECT" creds = _make_credentials() http = object() + client_options = ClientOptions() - client = self._make_one(project=project, credentials=creds, _http=http) + client = self._make_one( + project=project, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client.base_url, _DATASTORE_BASE_URL) + client.base_url = alternate_url + self.assertEqual(client.base_url, alternate_url) + + def test_base_url_property_w_client_options(self): + alternate_url = "https://alias.example.com/" + project = "PROJECT" + creds = _make_credentials() + http = object() + client_options = {"api_endpoint": "endpoint"} + + client = self._make_one( + project=project, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client.base_url, "endpoint") client.base_url = alternate_url self.assertEqual(client.base_url, alternate_url) From 13598efef4032c0db2961f8458fb52b9db4bd4ac Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 25 Sep 2019 12:35:50 -0400 Subject: [PATCH 245/611] docs: fix intersphinx reference to requests (#9294) --- packages/google-cloud-datastore/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index a4c30084718e..64f9e995acf3 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -339,7 +339,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } From 
3bb185ecd06dbfc7d51e6007193d6d33bd5d0808 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 15 Oct 2019 13:37:40 -0400
Subject: [PATCH 246/611] chore(datastore): release 1.10.0 (#9449)

---
 packages/google-cloud-datastore/CHANGELOG.md | 21 ++++++++++++++++++++
 packages/google-cloud-datastore/setup.py     |  2 +-
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md
index 621e31cd680b..2dc7b6c7cc86 100644
--- a/packages/google-cloud-datastore/CHANGELOG.md
+++ b/packages/google-cloud-datastore/CHANGELOG.md
@@ -4,6 +4,27 @@

 [1]: https://pypi.org/project/google-cloud-datastore/#history

+## 1.10.0
+
+10-10-2019 12:20 PDT
+
+
+### Implementation Changes
+- Remove send / receive message size limit (via synth). ([#8952](https://github.com/googleapis/google-cloud-python/pull/8952))
+
+### New Features
+- Add `client_options` to constructors for manual clients. ([#9055](https://github.com/googleapis/google-cloud-python/pull/9055))
+
+### Dependencies
+- Pin `google-cloud-core >= 1.0.3, < 2.0.0dev`. ([#9055](https://github.com/googleapis/google-cloud-python/pull/9055))
+
+### Documentation
+- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294))
+- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085))
+- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035))
+- Update docs for building datastore indexes. ([#8707](https://github.com/googleapis/google-cloud-python/pull/8707))
+- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805))
+
 ## 1.9.0

 07-24-2019 16:04 PDT
diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py
index 06c15b0b8434..4fadd33db7ce 100644
--- a/packages/google-cloud-datastore/setup.py
+++ b/packages/google-cloud-datastore/setup.py
@@ -22,7 +22,7 @@

 name = "google-cloud-datastore"
 description = "Google Cloud Datastore API client library"
-version = "1.9.0"
+version = "1.10.0"
 # Should be one of:
 # 'Development Status :: 3 - Alpha'
 # 'Development Status :: 4 - Beta'

From 2eb83d2b40c1d78a3dfe9cbb2c13dd04d1d8c3ae Mon Sep 17 00:00:00 2001
From: Sebastien Williams-Wynn
Date: Wed, 23 Oct 2019 17:43:12 +0100
Subject: [PATCH 247/611] docs(datastore): remove duplication of word 'API'
 (#9524)

---
 packages/google-cloud-datastore/README.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst
index e613092a58bf..bb685f04f9c9 100644
--- a/packages/google-cloud-datastore/README.rst
+++ b/packages/google-cloud-datastore/README.rst
@@ -105,7 +105,7 @@ Example Usage
 Next Steps
 ~~~~~~~~~~

 - Read the `Client Library Documentation`_ for Google Cloud Datastore API
-  API to see other available methods on the client.
+  to see other available methods on the client.
 - Read the `Product documentation`_ to learn more about the product and
   see How-to Guides.
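With the 1.10.0 release cut above, its headline feature — `client_options` on the handwritten client (#9055) — can be exercised as in this sketch; the project ID and endpoint are placeholders, not values taken from the patches:

```python
from google.cloud import datastore

# Hedged sketch: the manual client now accepts client_options; a dict
# carrying "api_endpoint" overrides the default Datastore base URL.
client = datastore.Client(
    project="my-project",
    client_options={"api_endpoint": "https://datastore.example.com"},
)
```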
From 5d3b92514032f8ecfe3e6fa8cc111f8f86be4fa9 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 11 Nov 2019 15:15:32 -0800 Subject: [PATCH 248/611] docs: add python 2 sunset banner to documentation (#9036) --- .../docs/_static/custom.css | 4 ++ .../docs/_templates/layout.html | 49 +++++++++++++++++++ packages/google-cloud-datastore/docs/conf.py | 2 +- 3 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-datastore/docs/_static/custom.css create mode 100644 packages/google-cloud-datastore/docs/_templates/layout.html diff --git a/packages/google-cloud-datastore/docs/_static/custom.css b/packages/google-cloud-datastore/docs/_static/custom.css new file mode 100644 index 000000000000..9a6f9f8ddc3a --- /dev/null +++ b/packages/google-cloud-datastore/docs/_static/custom.css @@ -0,0 +1,4 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/_templates/layout.html b/packages/google-cloud-datastore/docs/_templates/layout.html new file mode 100644 index 000000000000..de457b2c2767 --- /dev/null +++ b/packages/google-cloud-datastore/docs/_templates/layout.html @@ -0,0 +1,49 @@ +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ <div class="admonition" id="python2-eol"> + On January 1, 2020 this library will no longer support Python 2 on the latest released version. + Previously released library versions will continue to be available. For more information please + visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. + </div>
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 64f9e995acf3..902fb0d11c52 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -163,7 +163,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied From 1de6ef07d56f36b513c29f1c667f0dd54738240b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 12 Nov 2019 11:38:46 -0800 Subject: [PATCH 249/611] chore(datastore): change spacing in docs templates (via synth) (#9746) --- .../google-cloud-datastore/docs/_static/custom.css | 2 +- .../docs/_templates/layout.html | 1 + packages/google-cloud-datastore/synth.metadata | 12 ++++++------ 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/docs/_static/custom.css b/packages/google-cloud-datastore/docs/_static/custom.css index 9a6f9f8ddc3a..0abaf229fce3 100644 --- a/packages/google-cloud-datastore/docs/_static/custom.css +++ b/packages/google-cloud-datastore/docs/_static/custom.css @@ -1,4 +1,4 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/_templates/layout.html b/packages/google-cloud-datastore/docs/_templates/layout.html index de457b2c2767..228529efe2d2 100644 --- a/packages/google-cloud-datastore/docs/_templates/layout.html +++ b/packages/google-cloud-datastore/docs/_templates/layout.html @@ -1,3 +1,4 @@ + {% extends "!layout.html" %} {%- block content %} {%- if theme_fixed_sidebar|lower == 'true' %} diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 8bff1cea3c7c..f84ea0f90d6f 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:22:10.473851Z", + "updateTime": "2019-11-12T13:24:50.229401Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.41.1", + "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", + "internalRef": "279774957" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], From 50e21382d5a3b9d5daa9d9033a622b6824ee1f03 Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Thu, 6 Feb 2020 22:54:35 +0000 Subject: [PATCH 250/611] chore: add split repo templates --- .../.github/CONTRIBUTING.md | 28 ++ .../.github/ISSUE_TEMPLATE/bug_report.md | 44 +++ .../.github/ISSUE_TEMPLATE/feature_request.md | 18 ++ .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + 
.../.github/release-please.yml | 1 + packages/google-cloud-datastore/.gitignore | 58 ++++ .../google-cloud-datastore/.kokoro/build.sh | 39 +++ .../.kokoro/continuous/common.cfg | 27 ++ .../.kokoro/continuous/continuous.cfg | 1 + .../.kokoro/docs/common.cfg | 48 +++ .../.kokoro/docs/docs.cfg | 1 + .../.kokoro/presubmit/common.cfg | 27 ++ .../.kokoro/presubmit/presubmit.cfg | 1 + .../.kokoro/publish-docs.sh | 57 ++++ .../google-cloud-datastore/.kokoro/release.sh | 34 +++ .../.kokoro/release/common.cfg | 64 ++++ .../.kokoro/release/release.cfg | 1 + .../.kokoro/trampoline.sh | 23 ++ .../.repo-metadata.json | 2 +- .../google-cloud-datastore/CODE_OF_CONDUCT.md | 44 +++ .../google-cloud-datastore/CONTRIBUTING.rst | 279 ++++++++++++++++++ packages/google-cloud-datastore/LICENSE | 7 +- packages/google-cloud-datastore/MANIFEST.in | 1 + packages/google-cloud-datastore/docs/conf.py | 25 +- packages/google-cloud-datastore/noxfile.py | 12 +- packages/google-cloud-datastore/renovate.json | 5 + packages/google-cloud-datastore/setup.py | 2 +- .../google-cloud-datastore/synth.metadata | 15 +- 29 files changed, 845 insertions(+), 33 deletions(-) create mode 100644 packages/google-cloud-datastore/.github/CONTRIBUTING.md create mode 100644 packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/google-cloud-datastore/.github/release-please.yml create mode 100644 packages/google-cloud-datastore/.gitignore create mode 100755 packages/google-cloud-datastore/.kokoro/build.sh create mode 100644 packages/google-cloud-datastore/.kokoro/continuous/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/docs/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/docs/docs.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg create mode 100755 packages/google-cloud-datastore/.kokoro/publish-docs.sh create mode 100755 packages/google-cloud-datastore/.kokoro/release.sh create mode 100644 packages/google-cloud-datastore/.kokoro/release/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/release/release.cfg create mode 100755 packages/google-cloud-datastore/.kokoro/trampoline.sh create mode 100644 packages/google-cloud-datastore/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-datastore/CONTRIBUTING.rst create mode 100644 packages/google-cloud-datastore/renovate.json diff --git a/packages/google-cloud-datastore/.github/CONTRIBUTING.md b/packages/google-cloud-datastore/.github/CONTRIBUTING.md new file mode 100644 index 000000000000..939e5341e74d --- /dev/null +++ b/packages/google-cloud-datastore/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. 
You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to <https://cla.developers.google.com/> to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000000..e00382ac1ab0 --- /dev/null +++ b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,44 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/python-datastore/issues + - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python + - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `google-cloud-datastore` version: `pip show google-cloud-datastore` + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000000..6365857f33c6 --- /dev/null +++ b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered.
+ **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 000000000000..995869032125 --- /dev/null +++ b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..91cb335a75e5 --- /dev/null +++ b/packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-datastore/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/packages/google-cloud-datastore/.github/release-please.yml b/packages/google-cloud-datastore/.github/release-please.yml new file mode 100644 index 000000000000..4507ad0598a5 --- /dev/null +++ b/packages/google-cloud-datastore/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: python diff --git a/packages/google-cloud-datastore/.gitignore b/packages/google-cloud-datastore/.gitignore new file mode 100644 index 000000000000..3fb06e09ce74 --- /dev/null +++ b/packages/google-cloud-datastore/.gitignore @@ -0,0 +1,58 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated + +# Virtual environment +env/ +coverage.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh new file mode 100755 index 000000000000..6237d6617c66 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +cd github/python-datastore + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +python3.6 -m nox diff --git a/packages/google-cloud-datastore/.kokoro/continuous/common.cfg b/packages/google-cloud-datastore/.kokoro/continuous/common.cfg new file mode 100644 index 000000000000..74d61e90239f --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/continuous/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/build.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg b/packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/docs/common.cfg b/packages/google-cloud-datastore/.kokoro/docs/common.cfg new file mode 100644 index 000000000000..3b561689b108 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docs/common.cfg @@ -0,0 +1,48 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/publish-docs.sh" +} + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/docs/docs.cfg b/packages/google-cloud-datastore/.kokoro/docs/docs.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docs/docs.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/common.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/common.cfg new file mode 100644 index 000000000000..74d61e90239f --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/presubmit/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/build.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh new file mode 100755 index 000000000000..b478f4c4d34e --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +cd github/python-datastore + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +# build docs +nox -s docs + +python3 -m pip install gcp-docuploader + +# install a json parser +sudo apt-get update +sudo apt-get -y install software-properties-common +sudo add-apt-repository universe +sudo apt-get update +sudo apt-get -y install jq + +# create metadata +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh new file mode 100755 index 000000000000..00df87ac5390 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Move into the package, build the distribution and upload. +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +cd github/python-datastore +python3 setup.py sdist bdist_wheel +twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg new file mode 100644 index 000000000000..38ca6b7ef3dd --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/release/common.cfg @@ -0,0 +1,64 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/release.sh" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } +} + +# Fetch magictoken to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "releasetool-magictoken" + } + } +} + +# Fetch api key to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "magic-github-proxy-api-key" + } + } +} diff --git a/packages/google-cloud-datastore/.kokoro/release/release.cfg b/packages/google-cloud-datastore/.kokoro/release/release.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/release/release.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/trampoline.sh b/packages/google-cloud-datastore/.kokoro/trampoline.sh new file mode 100755 index 000000000000..e8c4251f3ed4 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/trampoline.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? 
+ +chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh +${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true + +exit ${ret_code} diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index f28b9d74ef7c..10661c59824f 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -6,7 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", "release_level": "ga", "language": "python", - "repo": "googleapis/google-cloud-python", + "repo": "googleapis/python-datastore", "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CODE_OF_CONDUCT.md b/packages/google-cloud-datastore/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..b3d1f6029849 --- /dev/null +++ b/packages/google-cloud-datastore/CODE_OF_CONDUCT.md @@ -0,0 +1,44 @@ + +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. + +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. + +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst new file mode 100644 index 000000000000..b6326a58b484 --- /dev/null +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -0,0 +1,279 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. 
Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: 2.7, + 3.5, 3.6, and 3.7 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``python-datastore`` `repo`_ on GitHub. + +- Fork and clone the ``python-datastore`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``python-datastore`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-python-datastore``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/python-datastore.git hack-on-python-datastore + $ cd hack-on-python-datastore + # Configure remotes such that you can pull changes from the googleapis/python-datastore + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/python-datastore.git + # fetch and merge changes from upstream into master + $ git fetch upstream + $ git merge upstream/master + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/python-datastore + +Using ``nox`` +============= + +We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit-2.7 + $ nox -s unit-3.7 + $ ... + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +Note on Editable Installs / Develop Mode +======================================== + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. + + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. 
_editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ + +- PEP8 compliance, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="master" + + By doing this, you are specifying the location of the most up-to-date + version of ``python-datastore``. The suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout, and the + branch should be the main branch on that remote (``master``). + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + $ nox -s system-3.7 + $ nox -s system-2.7 + + .. note:: + + System tests are only configured to run under Python 2.7 and + Python 3.7. For expediency, we do not run them in older versions + of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project and + so you'll need to provide some environment variables to facilitate + authentication to your project: + + - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; + Such a file can be downloaded directly from the developer's console by clicking + "Generate new JSON key". See private key + `docs <https://cloud.google.com/storage/docs/authentication#generating-a-private-key>`__ + for more details. + +- Once you have downloaded your json keys, set the environment variable + ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: + + $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" + + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/python-datastore/blob/master/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-datastore + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.5`_ +- `Python 3.6`_ +- `Python 3.7`_ + +.. _Python 3.5: https://docs.python.org/3.5/ +.. _Python 3.6: https://docs.python.org/3.6/ +.. _Python 3.7: https://docs.python.org/3.7/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/python-datastore/blob/master/noxfile.py + +We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ +and lack of continuous integration `support`_. + +.. _Python 2.5: https://docs.python.org/2.5/ +.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ +.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ + +We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no +longer supported by the core development team. + +Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. + +We also explicitly decided to support Python 3 beginning with version +3.5. Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ +.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-datastore/LICENSE b/packages/google-cloud-datastore/LICENSE index d64569567334..a8ee855de2aa 100644 --- a/packages/google-cloud-datastore/LICENSE +++ b/packages/google-cloud-datastore/LICENSE @@ -1,7 +1,6 @@ - - Apache License + Apache License Version 2.0, January 2004 - http://www.apache.org/licenses/ + https://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -193,7 +192,7 @@ you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index 9cbf175afe6b..cd011be27a0e 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 902fb0d11c52..19a56853f0b1 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -20,7 +20,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +__version__ = "" # -- General configuration ------------------------------------------------ @@ -33,7 +33,6 @@ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.coverage", "sphinx.ext.napoleon", @@ -46,6 +45,7 @@ autodoc_default_flags = ["members"] autosummary_generate = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -66,7 +66,7 @@ # General information about the project. project = u"google-cloud-datastore" -copyright = u"2017, Google" +copyright = u"2019, Google" author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for @@ -122,6 +122,7 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True + # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -132,9 +133,9 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-datastore", "github_user": "googleapis", - "github_repo": "google-cloud-python", + "github_repo": "python-datastore", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -230,6 +231,7 @@ # -- Options for warnings ------------------------------------------------------ + suppress_warnings = [ # Temporarily suppress this to avoid "more than one target found for # cross-reference" warning, which are intractable for us to avoid while in @@ -285,6 +287,7 @@ # If false, no module index is generated. # latex_domain_indices = True + # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples @@ -302,6 +305,7 @@ # If true, show URL addresses after external links. # man_show_urls = False + # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples @@ -314,7 +318,7 @@ u"google-cloud-datastore Documentation", author, "google-cloud-datastore", - "GAPIC library for the {metadata.shortName} v1 service", + "google-cloud-datastore Library", "APIs", ) ] @@ -331,19 +335,16 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. 
# texinfo_no_detailmenu = False + # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } + # Napoleon settings napoleon_google_docstring = True napoleon_numpy_docstring = True diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index a2eefbb6765f..7fbb1febff8f 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -23,7 +23,6 @@ import nox -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -38,7 +37,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.install("flake8", BLACK_VERSION) session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -67,14 +66,13 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) session.install("-e", ".") # Run py.test against the unit tests. session.run( "py.test", "--quiet", + "--cov=google.cloud.datastore", "--cov=google.cloud", "--cov=tests.unit", "--cov-append", @@ -86,7 +84,7 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) +@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) def unit(session): """Run the unit test suite.""" default(session) @@ -113,9 +111,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") + session.install("-e", ".") # Run py.test against the system tests. 
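The system-test sessions in the noxfile above, together with the ``GOOGLE_APPLICATION_CREDENTIALS`` setup described in CONTRIBUTING.rst, rely on Application Default Credentials. A minimal pre-flight sketch, assuming only that ``google-auth`` is installed and the variable points at a valid service-account key file (the path below is illustrative):

```python
# Sanity-check Application Default Credentials before `nox -s system-3.7`.
# Assumes something like the following was exported beforehand:
#   export GOOGLE_APPLICATION_CREDENTIALS="$HOME/keys/app_credentials.json"
import os

import google.auth

# The same variable the system tests and client constructors read.
key_path = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
if not key_path or not os.path.exists(key_path):
    raise SystemExit("GOOGLE_APPLICATION_CREDENTIALS does not point at a key file")

# Resolves the key file into credentials plus the project they belong to.
credentials, project_id = google.auth.default()
print("Credentials loaded for project:", project_id)
```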
diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json new file mode 100644 index 000000000000..4fa949311b20 --- /dev/null +++ b/packages/google-cloud-datastore/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base", ":preserveSemverRanges" + ] +} diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 4fadd33db7ce..ffe2fd19c539 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -63,7 +63,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", + url="https://github.com/googleapis/python-datastore", classifiers=[ release_status, "Intended Audience :: Developers", diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index f84ea0f90d6f..4fbc8e273696 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,27 @@ { - "updateTime": "2019-11-12T13:24:50.229401Z", + "updateTime": "2020-02-06T22:53:26.159627Z", "sources": [ { "generator": { "name": "artman", - "version": "0.41.1", - "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" + "version": "0.44.4", + "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", - "internalRef": "279774957" + "sha": "2e23b8fbc45f5d9e200572ca662fe1271bcd6760", + "internalRef": "293666452", + "log": "2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. 
It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\na8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry 
config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\nc1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n08b488e0660c59842a7dee0e3e2b65d9e3a514a9\nExposing cloud_catalog.proto (This API is already available through REST)\n\nPiperOrigin-RevId: 288625007\n\na613482977e11ac09fa47687a5d1b5a01efcf794\nUpdate the OS Login v1beta API description to render better in the UI.\n\nPiperOrigin-RevId: 288547940\n\n5e182b8d9943f1b17008d69d4c7e865dc83641a7\nUpdate the OS Login API description to render better in the UI.\n\nPiperOrigin-RevId: 288546443\n\ncb79155f596e0396dd900da93872be7066f6340d\nFix: Add a resource annotation for Agent\nFix: Correct the service name in annotations for Intent and SessionEntityType\n\nPiperOrigin-RevId: 288441307\n\nf7f6e9daec3315fd47cb638789bd8415bf4a27cc\nAdded cloud asset api v1p1beta1\n\nPiperOrigin-RevId: 288427239\n\nf2880f5b342c6345f3dcaad24fcb3c6ca9483654\nBilling account API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 288351810\n\ndc250ffe071729f8f8bef9d6fd0fbbeb0254c666\nFix: Remove incorrect resource annotations in requests\n\nPiperOrigin-RevId: 288321208\n\n91ef2d9dd69807b0b79555f22566fb2d81e49ff9\nAdd GAPIC annotations to Cloud KMS (but do not migrate the GAPIC config yet).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 287999179\n\n4d45a6399e9444fbddaeb1c86aabfde210723714\nRefreshing Cloud Billing API protos.\n\nThis exposes the following API methods:\n- UpdateBillingAccount\n- CreateBillingAccount\n- GetIamPolicy\n- SetIamPolicy\n- TestIamPermissions\n\nThere are also some new fields to support the management of sub-accounts.\n\nPiperOrigin-RevId: 287908369\n\nec285d3d230810147ebbf8d5b691ee90320c6d2d\nHide not yet implemented update_transforms message\n\nPiperOrigin-RevId: 287608953\n\na202fb3b91cd0e4231be878b0348afd17067cbe2\nBigQuery Storage Write API v1alpha2 clients. 
The service is enabled by whitelist only.\n\nPiperOrigin-RevId: 287379998\n\n650d7f1f8adb0cfaf37b3ce2241c3168f24efd4d\nUpdate Readme.md to match latest Bazel updates\n090d98aea20270e3be4b64240775588f7ce50ff8\ndocs(bigtable): Fix library release level listed in generated documentation\n\nPiperOrigin-RevId: 287308849\n\n2c28f646ca77b1d57550368be22aa388adde2e66\nfirestore: retry reads that fail with contention\n\nPiperOrigin-RevId: 287250665\n\nfd3091fbe9b2083cabc53dc50c78035658bfc4eb\nSync timeout in grpc config back to 10s for tasks API with github googelapis gapic config.\n\nPiperOrigin-RevId: 287207067\n\n49dd7d856a6f77c0cf7e5cb3334423e5089a9e8a\nbazel: Integrate bazel-2.0.0 compatibility fixes\n\nPiperOrigin-RevId: 287205644\n\n46e52fd64973e815cae61e78b14608fe7aa7b1df\nbazel: Integrate bazel build file generator\n\nTo generate/update BUILD.bazel files for any particular client or a batch of clients:\n```\nbazel run //:build_gen -- --src=google/example/library\n```\n\nPiperOrigin-RevId: 286958627\n\n1a380ea21dea9b6ac6ad28c60ad96d9d73574e19\nBigQuery Storage Read API v1beta2 clients.\n\nPiperOrigin-RevId: 286616241\n\n5f3f1d0f1c06b6475a17d995e4f7a436ca67ec9e\nAdd Artman config for secretmanager.\n\nPiperOrigin-RevId: 286598440\n\n50af0530730348f1e3697bf3c70261f7daaf2981\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 286491002\n\n91818800384f4ed26961aea268910b1a2ec58cc8\nFor Data Catalog API,\n1. Add support for marking a tag template field as required when creating a new tag template.\n2. Add support for updating a tag template field from required to optional.\n\nPiperOrigin-RevId: 286490262\n\nff4a2047b3d66f38c9b22197c370ed0d02fc0238\nWeekly library update.\n\nPiperOrigin-RevId: 286484215\n\n192c14029861752a911ed434fd6ee5b850517cd9\nWeekly library update.\n\nPiperOrigin-RevId: 286484165\n\nd9e328eaf790d4e4346fbbf32858160f497a03e0\nFix bazel build (versions 1.x)\n\nBump gapic-generator and resource names plugins to the latest version.\n\nPiperOrigin-RevId: 286469287\n\n0ca305403dcc50e31ad9477c9b6241ddfd2056af\nsecretmanager client package name option updates for java and go\n\nPiperOrigin-RevId: 286439553\n\nade4803e8a1a9e3efd249c8c86895d2f12eb2aaa\niam credentials: publish v1 protos containing annotations\n\nPiperOrigin-RevId: 286418383\n\n03e5708e5f8d1909dcb74b25520309e59ebf24be\nsecuritycenter: add missing proto deps for Bazel build\n\nPiperOrigin-RevId: 286417075\n\n8b991eb3eb82483b0ca1f1361a9c8e5b375c4747\nAdd secretmanager client package name options.\n\nPiperOrigin-RevId: 286415883\n\nd400cb8d45df5b2ae796b909f098a215b2275c1d\ndialogflow: add operation_info annotations to BatchUpdateEntities and BatchDeleteEntities.\n\nPiperOrigin-RevId: 286312673\n\nf2b25232db397ebd4f67eb901a2a4bc99f7cc4c6\nIncreased the default timeout time for all the Cloud Security Command Center client libraries.\n\nPiperOrigin-RevId: 286263771\n\ncb2f1eefd684c7efd56fd375cde8d4084a20439e\nExposing new Resource fields in the SecurityCenterProperties proto, added more comments to the filter logic for these Resource fields, and updated the response proto for the ListFindings API with the new Resource fields.\n\nPiperOrigin-RevId: 286263092\n\n73cebb20432b387c3d8879bb161b517d60cf2552\nUpdate v1beta2 clusters and jobs to include resource ids in GRPC header.\n\nPiperOrigin-RevId: 286261392\n\n1b4e453d51c0bd77e7b73896cdd8357d62768d83\nsecuritycenter: publish v1beta1 protos with annotations\n\nPiperOrigin-RevId: 286228860\n\na985eeda90ae98e8519d2320bee4dec148eb8ccb\nAdd default retry configurations for 
speech_v1p1beta1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 286191318\n\n3352100a15ede383f5ab3c34599f7a10a3d066fe\nMake importing rule with the same name (but different aliases) from different repositories possible.\n\nThis is needed to allow monolitic gapic-generator and microgenerators coexist during transition period.\n\nTo plug a microgenerator:\n\n1) Add corresponding rules bidnings under `switched_rules_by_language` in repository_rules.bzl:\n rules[\"go_gapic_library2\"] = _switch(\n go and grpc and gapic,\n \"@gapic_generator_go//rules_go_gapic/go_gapic.bzl\",\n \"go_gapic_library\",\n )\n\n2) Import microgenerator in WORKSPACE (the above example assumes that the generator was imported under name \"gapic_generator_go\").\n\n3) To migrate an API from monolith to micro generator (this is done per API and per language) modify the corresponding load statement in the API's BUILD.bazel file. For example, for the example above, to migrate to go microgenerator modify the go-specific load statement in BUILD.bazel file of a specific API (which you want to migrate) to the following:\n\nload(\n \"@com_google_googleapis_imports//:imports.bzl\",\n \"go_gapic_assembly_pkg\",\n go_gapic_library = \"go_gapic_library2\",\n \"go_proto_library\",\n \"go_test\",\n)\n\nPiperOrigin-RevId: 286065440\n\n6ad2bb13bc4b0f3f785517f0563118f6ca52ddfd\nUpdated v1beta1 protos for the client:\n- added support for GenericSignedAttestation which has a generic Signature\n- added support for CVSSv3 and WindowsDetail in Vulnerability\n- documentation updates\n\nPiperOrigin-RevId: 286008145\n\nfe1962e49999a832eed8162c45f23096336a9ced\nAdMob API v1 20191210\n\nBasic account info, mediation and network report available. See https://developers.google.com/admob/api/release-notes for more details.\n\nPiperOrigin-RevId: 285894502\n\n41fc1403738b61427f3a798ca9750ef47eb9c0f2\nAnnotate the required fields for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285824386\n\n27d0e0f202cbe91bf155fcf36824a87a5764ef1e\nRemove inappropriate resource_reference annotations for UpdateWorkflowTemplateRequest.template.\n\nPiperOrigin-RevId: 285802643\n\ne5c4d3a2b5b5bef0a30df39ebb27711dc98dee64\nAdd Artman BUILD.bazel file for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285445602\n\n2085a0d3c76180ee843cf2ecef2b94ca5266be31\nFix path in the artman config for Monitoring Dashboard API.\n\nPiperOrigin-RevId: 285233245\n\n2da72dfe71e4cca80902f9e3e125c40f02c2925b\nAdd Artman and GAPIC configs for the Monitoring Dashboards API.\n\nPiperOrigin-RevId: 285211544\n\n9f6eeebf1f30f51ffa02acea5a71680fe592348e\nAdd annotations to Dataproc v1. 
(Also forwarding comment changes from internal source control.)\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 285197557\n\n19c4589a3cb44b3679f7b3fba88365b3d055d5f8\noslogin: fix v1beta retry configuration\n\nPiperOrigin-RevId: 285013366\n\nee3f02926d0f8a0bc13f8d716581aad20f575751\nAdd Monitoring Dashboards API protocol buffers to Google Cloud Monitoring API.\n\nPiperOrigin-RevId: 284982647\n\ne47fdd266542386e5e7346697f90476e96dc7ee8\nbigquery datatransfer: Remove non-publicly available DataSourceService.\n\nPiperOrigin-RevId: 284822593\n\n6156f433fd1d9d5e4a448d6c6da7f637921d92ea\nAdds OSConfig v1beta protos and initial client library config\n\nPiperOrigin-RevId: 284799663\n\n6cc9499e225a4f6a5e34fe07e390f67055d7991c\nAdd datetime.proto to google/type/BUILD.bazel\n\nPiperOrigin-RevId: 284643689\n\nfe7dd5277e39ffe0075729c61e8d118d7527946d\nCosmetic changes to proto comment as part of testing internal release instructions.\n\nPiperOrigin-RevId: 284608712\n\n68d109adad726b89f74276d2f4b2ba6aac6ec04a\nAdd annotations to securitycenter v1, but leave GAPIC v1 in place.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 284580511\n\ndf8a1707a910fc17c71407a75547992fd1864c51\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 284568564\n\na69a974976221ce3bb944901b739418b85d6408c\nclient library update\n\nPiperOrigin-RevId: 284463979\n\na4adac3a12aca6e3a792c9c35ee850435fe7cf7e\nAdded DateTime, TimeZone, and Month proto files to google/type\n\nPiperOrigin-RevId: 284277770\n\ned5dec392906078db4f7745fe4f11d34dd401ae9\nchange common resources from message-level annotations to file-level annotations.\n\nPiperOrigin-RevId: 284236794\n\na00e2c575ef1b637667b4ebe96b8c228b2ddb273\nbigquerydatatransfer: change resource type TransferRun to Run to be consistent with gapic configs\nbigquerydatatransfer: add missing patterns for DataSource, TransferConfig and Run (to allow the location segment)\nbigquerydatatransfer: add file-level Parent resource type (to allow the location segement)\nbigquerydatatransfer: update grpc service config with correct retry delays\n\nPiperOrigin-RevId: 284234378\n\nb10e4547017ca529ac8d183e839f3c272e1c13de\ncloud asset: replace required fields for batchgetassethistory. Correct the time out duration.\n\nPiperOrigin-RevId: 284059574\n\n6690161e3dcc3367639a2ec10db67bf1cf392550\nAdd default retry configurations for speech_v1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. 
See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 284035915\n\n9b2635ef91e114f0357bdb87652c26a8f59316d5\ncloudtasks: fix gapic v2 config\n\nPiperOrigin-RevId: 284020555\n\ne5676ba8b863951a8ed0bfd6046e1db38062743c\nReinstate resource name handling in GAPIC config for Asset v1.\n\nPiperOrigin-RevId: 283993903\n\nf337f7fb702c85833b7b6ca56afaf9a1bf32c096\nOSConfig AgentEndpoint: add LookupEffectiveGuestPolicy rpc\n\nPiperOrigin-RevId: 283989762\n\nc0ac9b55f2e2efd0ee525b3a6591a1b09330e55a\nInclude real time feed api into v1 version\n\nPiperOrigin-RevId: 283845474\n\n2427a3a0f6f4222315362d973d91a082a3a884a7\nfirestore admin: update v1 protos with annotations & retry config\n\nPiperOrigin-RevId: 283826605\n\n555e844dbe04af50a8f55fe1217fa9d39a0a80b2\nchore: publish retry configs for iam admin, cloud asset, and remoteworkers\n\nPiperOrigin-RevId: 283801979\n\n6311dc536668849142d1fe5cd9fc46da66d1f77f\nfirestore: update v1beta1 protos with annotations and retry config\n\nPiperOrigin-RevId: 283794315\n\nda0edeeef953b05eb1524d514d2e9842ac2df0fd\nfeat: publish several retry config files for client generation\n\nPiperOrigin-RevId: 283614497\n\n59a78053537e06190f02d0a7ffb792c34e185c5a\nRemoving TODO comment\n\nPiperOrigin-RevId: 283592535\n\n8463992271d162e2aff1d5da5b78db11f2fb5632\nFix bazel build\n\nPiperOrigin-RevId: 283589351\n\n3bfcb3d8df10dfdba58f864d3bdb8ccd69364669\nPublic client library for bebop_jobs_api_20191118_1_RC3 release.\n\nPiperOrigin-RevId: 283568877\n\n27ab0db61021d267c452b34d149161a7bf0d9f57\nfirestore: publish annotated protos and new retry config\n\nPiperOrigin-RevId: 283565148\n\n38dc36a2a43cbab4a2a9183a43dd0441670098a9\nfeat: add http annotations for operations calls\n\nPiperOrigin-RevId: 283384331\n\n366caab94906975af0e17822e372f1d34e319d51\ndatastore: add a legacy artman config for PHP generation\n\nPiperOrigin-RevId: 283378578\n\n82944da21578a53b74e547774cf62ed31a05b841\nMigrate container v1beta1 to GAPIC v2.\n\nPiperOrigin-RevId: 283342796\n\n584dcde5826dd11ebe222016b7b208a4e1196f4b\nRemove resource name annotation for UpdateKeyRequest.key, because it's the resource, not a name.\n\nPiperOrigin-RevId: 283167368\n\n6ab0171e3688bfdcf3dbc4056e2df6345e843565\nAdded resource annotation for Key message.\n\nPiperOrigin-RevId: 283066965\n\n86c1a2db1707a25cec7d92f8850cc915163ec3c3\nExpose Admin API methods for Key manipulation.\n\nPiperOrigin-RevId: 282988776\n\n3ddad085965896ffb205d44cb0c0616fe3def10b\nC++ targets: correct deps so they build, rename them from trace* to cloudtrace*\nto match the proto names.\n\nPiperOrigin-RevId: 282857635\n\ne9389365a971ad6457ceb9646c595e79dfdbdea5\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 282810797\n\ne42eaaa9abed3c4d63d64f790bd3191448dbbca6\nPut back C++ targets for cloud trace v2 api.\n\nPiperOrigin-RevId: 282803841\n\nd8896a3d8a191702a9e39f29cf4c2e16fa05f76d\nAdd initial BUILD.bazel for secretmanager.googleapis.com\n\nPiperOrigin-RevId: 282674885\n\n2cc56cb83ea3e59a6364e0392c29c9e23ad12c3a\nCreate sample for list recommendations\n\nPiperOrigin-RevId: 282665402\n\nf88e2ca65790e3b44bb3455e4779b41de1bf7136\nbump Go to ga\n\nPiperOrigin-RevId: 282651105\n\naac86d932b3cefd7d746f19def6935d16d6235e0\nDocumentation update. 
Add location_id in preparation for regionalization.\n\nPiperOrigin-RevId: 282586371\n\n5b501cd384f6b842486bd41acce77854876158e7\nMigrate Datastore Admin to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282570874\n\n6a16d474d5be201b20a27646e2009c4dfde30452\nMigrate Datastore to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282564329\n\n74bd9b95ac8c70b883814e4765a725cffe43d77c\nmark Go lib ga\n\nPiperOrigin-RevId: 282562558\n\nf7b3d434f44f6a77cf6c37cae5474048a0639298\nAdd secretmanager.googleapis.com protos\n\nPiperOrigin-RevId: 282546399\n\nc34a911aaa0660a45f5a556578f764f135e6e060\niot: bump Go GAPIC to GA release level\n\nPiperOrigin-RevId: 282494787\n\n79b7f1c5ba86859dbf70aa6cd546057c1002cdc0\nPut back C++ targets.\nPrevious change overrode custom C++ targets made by external teams. This PR puts those targets back.\n\nPiperOrigin-RevId: 282458292\n\n06a840781d2dc1b0a28e03e30fb4b1bfb0b29d1e\nPopulate BAZEL.build files for around 100 APIs (all APIs we publish) in all 7 langauges.\n\nPiperOrigin-RevId: 282449910\n\n777b580a046c4fa84a35e1d00658b71964120bb0\nCreate BUILD file for recommender v1beta1\n\nPiperOrigin-RevId: 282068850\n\n48b385b6ef71dfe2596490ea34c9a9a434e74243\nGenerate recommender v1beta1 gRPC ServiceConfig file\n\nPiperOrigin-RevId: 282067795\n\n8395b0f1435a4d7ce8737b3b55392627758bd20c\nfix: Set timeout to 25s, because Tasks fails for any deadline above 30s.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282017295\n\n3ba7ddc4b2acf532bdfb0004ca26311053c11c30\nfix: Shift Ruby and PHP to legacy GAPIC YAMLs for back-compat.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281852671\n\nad6f0c002194c3ec6c13d592d911d122d2293931\nRemove unneeded yaml files\n\nPiperOrigin-RevId: 281835839\n\n1f42588e4373750588152cdf6f747de1cadbcbef\nrefactor: Migrate Tasks beta 2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769558\n\n902b51f2073e9958a2aba441f7f7ac54ea00966d\nrefactor: Migrate Tasks to GAPIC v2 (for real this time).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769522\n\n17561f59970eede87f61ef6e9c322fa1198a2f4d\nMigrate Tasks Beta 3 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769519\n\nf95883b15a1ddd58eb7e3583fdefe7b00505faa3\nRegenerate recommender v1beta1 protos and sanitized yaml\n\nPiperOrigin-RevId: 281765245\n\n9a52df54c626b36699a058013d1735a166933167\nadd gRPC ServiceConfig for grafeas v1\n\nPiperOrigin-RevId: 281762754\n\n7a79d682ef40c5ca39c3fca1c0901a8e90021f8a\nfix: Roll back Tasks GAPIC v2 while we investigate C# issue.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281758548\n\n3fc31491640a90f029f284289e7e97f78f442233\nMigrate Tasks to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281751187\n\n5bc0fecee454f857cec042fb99fe2d22e1bff5bc\nfix: adds operation HTTP rules back to v1p1beta1 config\n\nPiperOrigin-RevId: 281635572\n\n5364a19284a1333b3ffe84e4e78a1919363d9f9c\nbazel: Fix build\n\n1) Update to latest gapic-generator (has iam resource names fix for java).\n2) Fix non-trivial issues with oslogin (resources defined in sibling package to the one they are used from) and monitoring.\n3) Fix trivial missing dependencies in proto_library targets for other apis.\n\nThis is to prepare the repository to being populated with BUILD.bazel files for all supported apis (101 API) in all 7 languages.\n\nPiperOrigin-RevId: 281618750\n\n0aa77cbe45538d5e5739eb637db3f2940b912789\nUpdating common proto files in google/type/ with their latest versions.\n\nPiperOrigin-RevId: 
281603926\n\nd47e1b4485b3effbb2298eb10dd13a544c0f66dc\nfix: replace Speech Recognize RPC retry_codes_name for non-standard assignment\n\nPiperOrigin-RevId: 281594037\n\n16543773103e2619d2b5f52456264de5bb9be104\nRegenerating public protos for datacatalog, also adding gRPC service config.\n\nPiperOrigin-RevId: 281423227\n\n328ebe76adb06128d12547ed70107fb841aebf4e\nChange custom data type from String to google.protobuf.Struct to be consistent with other docs such as\nhttps://developers.google.com/actions/smarthome/develop/process-intents#response_format\n\nPiperOrigin-RevId: 281402467\n\n5af83f47b9656261cafcf88b0b3334521ab266b3\n(internal change without visible public changes)\n\nPiperOrigin-RevId: 281334391\n\nc53ed56649583a149382bd88d3c427be475b91b6\nFix typo in protobuf docs.\n\nPiperOrigin-RevId: 281293109\n\nd8dd7fe8d5304f7bd1c52207703d7f27d5328c5a\nFix build by adding missing deps.\n\nPiperOrigin-RevId: 281088257\n\n3ef5ffd7351809d75c1332d2eaad1f24d9c318e4\nMigrate Error Reporting v1beta1 to proto annotations / GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281075722\n\n418ee8e24a56b5959e1c1defa4b6c97f883be379\nTrace v2: Add remaining proto annotations, migrate to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281068859\n\nc89394342a9ef70acaf73a6959e04b943fbc817b\nThis change updates an outdated comment for the feature importance proto field since they are no longer in [0, 1] for online predictions.\n\nPiperOrigin-RevId: 280761373\n\n1ec8b8e2c3c8f41d7d2b22c594c025276d6a4ae6\nCode refactoring\n\nPiperOrigin-RevId: 280760149\n\n427a22b04039f93b769d89accd6f487413f667c1\nImport automl operation protos.\n\nPiperOrigin-RevId: 280703572\n\n45749a04dac104e986f6cc47da3baf7c8bb6f9b0\nfix: bigqueryconnection_gapic.yaml to reflect proto annotations\n\n* remove connection_credential resource\n* make CreateCredentialRequest.connection_id optional\n* shuffle field ordering in CreateCredential flattening\n\nPiperOrigin-RevId: 280685438\n\n8385366aa1e5d7796793db02a9c5e167d1fd8f17\nRevert the Trace v2 GAPIC for now.\nCommitter: @lukesneeringer\n\nPiperOrigin-RevId: 280669295\n\n5c8ab2c072d557c2f4c4e54b544394e2d62202d5\nMigrate Trace v1 and Trace v2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 280667429\n\nf6808ff4e8b966cd571e99279d4a2780ed97dff2\nRename the `endpoint_urls` field to `endpoint_uris` to be consistent with\nGoogle API nomenclature.\n\nPiperOrigin-RevId: 280581337\n\n1935fb8889686f5c9d107f11b3c6870fc3aa7cdc\nComment updates\n\nPiperOrigin-RevId: 280451656\n\n0797fd5b9029d630e68a0899734715d62ad38e33\nComment updates\n\nPiperOrigin-RevId: 280451600\n\n9bc8d07b8b749e791d16c8d559526928ceaf1994\nRollback of \"Migrate Cloud Error Reporting to proto annotations & GAPIC v2.\"\n\nPiperOrigin-RevId: 280445975\n\nf8720321aecf4aab42e03602ac2c67f9777d9170\nfix: bigtable retry config in GAPIC v2\n\nPiperOrigin-RevId: 280434856\n\nb11664ba64f92d96d748e0dd9724d006dcafd120\nMigrate Cloud Error Reporting to proto annotations & GAPIC v2.\n\nPiperOrigin-RevId: 280432937\n\n4f747bda9b099b4426f495985680d16d0227fa5f\n1. Change DataCatalog package name in java from com.google.cloud.datacatalog to com.google.cloud.datacatalog.v1beta1 (API version is included in the package). *This is a breaking change.*\n\n2. Add API for Taxonomies (PolicyTagManager and PolicyTagManagerSerialization services).\n\n3. 
Minor changes to documentation.\n\nPiperOrigin-RevId: 280394936\n\nbc76ffd87360ce1cd34e3a6eac28afd5e1efda76\nUse rules_proto bzl files to load proto_library\n\nThis makes googleapis forward compatible with Bazel incompatible change https://github.com/bazelbuild/bazel/issues/8922.\n\nThis CL was created by adding @rules_proto to the WORKSPACE file and then running:\n\nfind . -name BUILD.bazel | \\\n while read build; do \\\n buildifier --lint=fix --warnings=load $build; \\\n done\n\nSince buildifier cannot be told not to reformat the BUILD file, some files are reformatted.\n\nPiperOrigin-RevId: 280356106\n\n218164b3deba1075979c9dca5f71461379e42dd1\nMake the `permissions` argument in TestIamPermissions required.\n\nPiperOrigin-RevId: 280279014\n\ndec8fd8ea5dc464496606189ba4b8949188639c8\nUpdating Cloud Billing Budget API documentation for clarity.\n\nPiperOrigin-RevId: 280225437\n\na667ffab90deb5e2669eb40ec7b61ec96a3d0454\nIntroduced detailed status message for CreateTimeSeries: CreateTimeSeriesSummary replaces CreateTimeSeriesError, which is now deprecated and unused.\n\nPiperOrigin-RevId: 280221707\n\nbe0a25eceec8916633447a37af0ecea801b85186\nMigrate Bigtable API to GAPIC v2 config.\n\nPiperOrigin-RevId: 280199643\n\n88bbf96b90089994ed16208a0f38cdd07f743742\nFix location of monitoring.yaml in Artman config for monitoring v3.\n\nPiperOrigin-RevId: 280134477\n\ndbaa01a20303758eed0c5a95ad2239ea306ad9a5\nUpdate namespace for PHP.\n\nPiperOrigin-RevId: 280085199\n\nf73b3796a635b2026a590d5133af7fa1f0eb807b\nStandardize pub/sub client default settings across clients:\n- Add retry codes for streaming pull\n- Decrease publish's max_rpc_timeout (mini-timeout) from 10 mins to 1 min\n- Decrease publish's total timeout from 10 mins to 1 min\n- Increase publish batching threshold from 10 to 100 elements\n- Increase publish batching size threshold from 1 KiB to 1 MiB\n\nPiperOrigin-RevId: 280044012\n\n822172613e1d93bede3beaf78b123c42a5876e2b\nReplace local_repository with http_archive in WORKSPACE\n\nPiperOrigin-RevId: 280039052\n\n6a8c7914d1b79bd832b5157a09a9332e8cbd16d4\nAdded notification_supported_by_agent to indicate whether the agent is sending notifications to Google or not.\n\nPiperOrigin-RevId: 279991530\n\n675de3dc9ab98cc1cf54216ad58c933ede54e915\nAdd an endpoint_urls field to the instance admin proto and adds a field_mask field to the GetInstanceRequest.\n\nPiperOrigin-RevId: 279982263\n\n" } }, { "template": { - "name": "python_library", + "name": "python_split_library", "origin": "synthtool.gcp", - "version": "2019.10.17" + "version": "2020.2.4" } } ], From b6dfc55fa58af9af1a4c94c8be4b8ceef243388a Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Thu, 6 Feb 2020 23:04:38 +0000 Subject: [PATCH 251/611] fix: add test_utils --- .../test_utils/credentials.json.enc | 49 ++++ .../scripts/circleci/get_tagged_package.py | 64 +++++ .../scripts/circleci/twine_upload.sh | 36 +++ .../test_utils/scripts/get_target_packages.py | 268 ++++++++++++++++++ .../scripts/get_target_packages_kokoro.py | 98 +++++++ .../test_utils/scripts/run_emulator.py | 199 +++++++++++++ .../test_utils/scripts/update_docs.sh | 93 ++++++ .../test_utils/setup.py | 64 +++++ .../test_utils/test_utils/__init__.py | 0 .../test_utils/test_utils/imports.py | 38 +++ .../test_utils/test_utils/retry.py | 207 ++++++++++++++ .../test_utils/test_utils/system.py | 81 ++++++ .../test_utils/test_utils/vpcsc_config.py | 118 ++++++++ 13 files changed, 1315 insertions(+) create mode 100644 
packages/google-cloud-datastore/test_utils/credentials.json.enc create mode 100644 packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py create mode 100755 packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh create mode 100644 packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py create mode 100644 packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py create mode 100644 packages/google-cloud-datastore/test_utils/scripts/run_emulator.py create mode 100755 packages/google-cloud-datastore/test_utils/scripts/update_docs.sh create mode 100644 packages/google-cloud-datastore/test_utils/setup.py create mode 100644 packages/google-cloud-datastore/test_utils/test_utils/__init__.py create mode 100644 packages/google-cloud-datastore/test_utils/test_utils/imports.py create mode 100644 packages/google-cloud-datastore/test_utils/test_utils/retry.py create mode 100644 packages/google-cloud-datastore/test_utils/test_utils/system.py create mode 100644 packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py diff --git a/packages/google-cloud-datastore/test_utils/credentials.json.enc b/packages/google-cloud-datastore/test_utils/credentials.json.enc new file mode 100644 index 000000000000..f073c7e4f774 --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/credentials.json.enc @@ -0,0 +1,49 @@ +U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA +UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU +aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj +HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV +V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus +J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8 +Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He +/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv +ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT +6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq +NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8 +j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF +41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM +IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g +x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/ +vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy +ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At +CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD +j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK +jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z +cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO +LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso +Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d +XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/ +MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP ++dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4 +kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU +5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr +E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29 +D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT +tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX +XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6 
+J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB +jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM +td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg +twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC +mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU +aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6 +uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK +n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ +bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX +ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H +NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w +1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE +8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL +qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv +tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4 +iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l +bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD diff --git a/packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py b/packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py new file mode 100644 index 000000000000..c148b9dc2370 --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py @@ -0,0 +1,64 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper to determine package from tag. +Get the current package directory corresponding to the Circle Tag. +""" + +from __future__ import print_function + +import os +import re +import sys + + +TAG_RE = re.compile(r""" + ^ + (?P + (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed) + ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) + $ +""", re.VERBOSE) +TAG_ENV = 'CIRCLE_TAG' +ERROR_MSG = '%s env. var. not set' % (TAG_ENV,) +BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z' +CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__) +ROOT_DIR = os.path.realpath( + os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..')) + + +def main(): + """Get the current package directory. + Prints the package directory out so callers can consume it. 
+ """ + if TAG_ENV not in os.environ: + print(ERROR_MSG, file=sys.stderr) + sys.exit(1) + + tag_name = os.environ[TAG_ENV] + match = TAG_RE.match(tag_name) + if match is None: + print(BAD_TAG_MSG % (tag_name,), file=sys.stderr) + sys.exit(1) + + pkg_name = match.group('pkg') + if pkg_name is None: + print(ROOT_DIR) + else: + pkg_dir = pkg_name.rstrip('-').replace('-', '_') + print(os.path.join(ROOT_DIR, pkg_dir)) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh b/packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh new file mode 100755 index 000000000000..23a4738e90b9 --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ev + +# If this is not a CircleCI tag, no-op. +if [[ -z "$CIRCLE_TAG" ]]; then + echo "This is not a release tag. Doing nothing." + exit 0 +fi + +# H/T: http://stackoverflow.com/a/246128/1068170 +SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py" +# Determine the package directory being deploying on this tag. +PKG_DIR="$(python ${SCRIPT})" + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Move into the package, build the distribution and upload. +cd ${PKG_DIR} +python3 setup.py sdist bdist_wheel +twine upload dist/* diff --git a/packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py b/packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py new file mode 100644 index 000000000000..1d51830cc23a --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py @@ -0,0 +1,268 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Print a list of packages which require testing.""" + +import os +import re +import subprocess +import warnings + + +CURRENT_DIR = os.path.realpath(os.path.dirname(__file__)) +BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..')) +GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python') +CI = os.environ.get('CI', '') +CI_BRANCH = os.environ.get('CIRCLE_BRANCH') +CI_PR = os.environ.get('CIRCLE_PR_NUMBER') +CIRCLE_TAG = os.environ.get('CIRCLE_TAG') +head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD'] +).strip().decode('ascii').split() +rev_parse = subprocess.check_output( + ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] +).strip().decode('ascii') +MAJOR_DIV = '#' * 78 +MINOR_DIV = '#' + '-' * 77 + +# NOTE: This reg-ex is copied from ``get_tagged_packages``. +TAG_RE = re.compile(r""" + ^ + (?P + (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) + ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) + $ +""", re.VERBOSE) + +# This is the current set of dependencies by package. +# As of this writing, the only "real" dependency is that of error_reporting +# (on logging), the rest are just system test dependencies. +PKG_DEPENDENCIES = { + 'logging': {'pubsub'}, +} + + +def get_baseline(): + """Return the baseline commit. + + On a pull request, or on a branch, return the common parent revision + with the master branch. + + Locally, return a value pulled from environment variables, or None if + the environment variables are not set. + + On a push to master, return None. This will effectively cause everything + to be considered to be affected. + """ + + # If this is a pull request or branch, return the tip for master. + # We will test only packages which have changed since that point. + ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR]) + + if ci_non_master: + + repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO) + subprocess.run(['git', 'remote', 'add', 'baseline', repo_url], + stderr=subprocess.DEVNULL) + subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL) + + if CI_PR is None and CI_BRANCH is not None: + output = subprocess.check_output([ + 'git', 'merge-base', '--fork-point', + 'baseline/master', CI_BRANCH]) + return output.strip().decode('ascii') + + return 'baseline/master' + + # If environment variables are set identifying what the master tip is, + # use that. + if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''): + remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE'] + branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master') + return '%s/%s' % (remote, branch) + + # If we are not in CI and we got this far, issue a warning. + if not CI: + warnings.warn('No baseline could be determined; this means tests ' + 'will run for every package. If this is local ' + 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE ' + 'environment variable.') + + # That is all we can do; return None. + return None + + +def get_changed_files(): + """Return a list of files that have been changed since the baseline. + + If there is no base, return None. + """ + # Get the baseline, and fail quickly if there is no baseline. + baseline = get_baseline() + print('# Baseline commit: {}'.format(baseline)) + if not baseline: + return None + + # Return a list of altered files. 
+    try:
+        return subprocess.check_output([
+            'git', 'diff', '--name-only', '{}..HEAD'.format(baseline),
+        ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
+    except subprocess.CalledProcessError:
+        warnings.warn('Unable to perform git diff; falling back to assuming '
+                      'all packages have changed.')
+        return None
+
+
+def reverse_map(dict_of_sets):
+    """Reverse a map of one-to-many.
+
+    So the map::
+
+        {
+            'A': {'B', 'C'},
+            'B': {'C'},
+        }
+
+    becomes
+
+        {
+            'B': {'A'},
+            'C': {'A', 'B'},
+        }
+
+    Args:
+        dict_of_sets (dict[set]): A dictionary of sets, mapping
+            one value to many.
+
+    Returns:
+        dict[set]: The reversed map.
+    """
+    result = {}
+    for key, values in dict_of_sets.items():
+        for value in values:
+            result.setdefault(value, set()).add(key)
+
+    return result
+
+
+def get_changed_packages(file_list):
+    """Return a list of changed packages based on the provided file list.
+
+    If the file list is None, then all packages should be considered to be
+    altered.
+    """
+    # Determine a complete list of packages.
+    all_packages = set()
+    for file_ in os.listdir(BASE_DIR):
+        abs_file = os.path.realpath(os.path.join(BASE_DIR, file_))
+        nox_file = os.path.join(abs_file, 'nox.py')
+        if os.path.isdir(abs_file) and os.path.isfile(nox_file):
+            all_packages.add(file_)
+
+    # If there is no file list, send down the full package set.
+    if file_list is None:
+        return all_packages
+
+    # Create a set based on the list of changed files.
+    answer = set()
+    reverse_deps = reverse_map(PKG_DEPENDENCIES)
+    for file_ in file_list:
+        # Ignore root directory changes (setup.py, .gitignore, etc.).
+        if os.path.sep not in file_:
+            continue
+
+        # Ignore changes that are not in a package (usually this will be docs).
+        package = file_.split(os.path.sep, 1)[0]
+        if package not in all_packages:
+            continue
+
+        # If there is a change in core, short-circuit now and return
+        # everything.
+        if package in ('core',):
+            return all_packages
+
+        # Add the package, as well as any dependencies this package has.
+        # NOTE: For now, dependencies only go down one level.
+        answer.add(package)
+        answer = answer.union(reverse_deps.get(package, set()))
+
+    # We got this far without being short-circuited; return the final answer.
+    return answer
+
+
+def get_tagged_package():
+    """Return the package corresponding to the current tag.
+
+    If there is no tag, will return :data:`None`.
+    """
+    if CIRCLE_TAG is None:
+        return
+
+    match = TAG_RE.match(CIRCLE_TAG)
+    if match is None:
+        return
+
+    pkg_name = match.group('pkg')
+    if pkg_name == '':
+        # NOTE: This corresponds to the "umbrella" tag.
+        return
+
+    return pkg_name.rstrip('-').replace('-', '_')
+
+
+def get_target_packages():
+    """Return a list of target packages to be run in the current build.
+
+    If in a tag build, will run only the package(s) that are tagged, otherwise
+    will run the packages that have file changes in them (or packages that
+    depend on those).
+ """ + tagged_package = get_tagged_package() + if tagged_package is None: + file_list = get_changed_files() + print(MAJOR_DIV) + print('# Changed files:') + print(MINOR_DIV) + for file_ in file_list or (): + print('# {}'.format(file_)) + for package in sorted(get_changed_packages(file_list)): + yield package + else: + yield tagged_package + + +def main(): + print(MAJOR_DIV) + print('# Environment') + print(MINOR_DIV) + print('# CircleCI: {}'.format(CI)) + print('# CircleCI branch: {}'.format(CI_BRANCH)) + print('# CircleCI pr: {}'.format(CI_PR)) + print('# CircleCI tag: {}'.format(CIRCLE_TAG)) + print('# HEAD ref: {}'.format(head_hash)) + print('# {}'.format(head_name)) + print('# Git branch: {}'.format(rev_parse)) + print(MAJOR_DIV) + + packages = list(get_target_packages()) + + print(MAJOR_DIV) + print('# Target packages:') + print(MINOR_DIV) + for package in packages: + print(package) + print(MAJOR_DIV) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py b/packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py new file mode 100644 index 000000000000..27d3a0c940ea --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py @@ -0,0 +1,98 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Print a list of packages which require testing.""" + +import pathlib +import subprocess + +import ci_diff_helper +import requests + + +def print_environment(environment): + print("-> CI environment:") + print('Branch', environment.branch) + print('PR', environment.pr) + print('In PR', environment.in_pr) + print('Repo URL', environment.repo_url) + if environment.in_pr: + print('PR Base', environment.base) + + +def get_base(environment): + if environment.in_pr: + return environment.base + else: + # If we're not in a PR, just calculate the changes between this commit + # and its parent. 
+ return 'HEAD~1' + + +def get_changed_files_from_base(base): + return subprocess.check_output([ + 'git', 'diff', '--name-only', f'{base}..HEAD', + ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') + + +_URL_TEMPLATE = ( + 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/' + '{}/files' +) + + +def get_changed_files_from_pr(pr): + url = _URL_TEMPLATE.format(pr) + while url is not None: + response = requests.get(url) + for info in response.json(): + yield info['filename'] + url = response.links.get('next', {}).get('url') + + +def determine_changed_packages(changed_files): + packages = [ + path.parent for path in pathlib.Path('.').glob('*/noxfile.py') + ] + + changed_packages = set() + for file in changed_files: + file = pathlib.Path(file) + for package in packages: + if package in file.parents: + changed_packages.add(package) + + return changed_packages + + +def main(): + environment = ci_diff_helper.get_config() + print_environment(environment) + base = get_base(environment) + + if environment.in_pr: + changed_files = list(get_changed_files_from_pr(environment.pr)) + else: + changed_files = get_changed_files_from_base(base) + + packages = determine_changed_packages(changed_files) + + print(f"Comparing against {base}.") + print("-> Changed packages:") + + for package in packages: + print(package) + + +main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/run_emulator.py b/packages/google-cloud-datastore/test_utils/scripts/run_emulator.py new file mode 100644 index 000000000000..287b08640691 --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/scripts/run_emulator.py @@ -0,0 +1,199 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run system tests locally with the emulator. + +First makes system calls to spawn the emulator and get the local environment +variable needed for it. Then calls the system tests. +""" + + +import argparse +import os +import subprocess + +import psutil + +from google.cloud.environment_vars import BIGTABLE_EMULATOR +from google.cloud.environment_vars import GCD_DATASET +from google.cloud.environment_vars import GCD_HOST +from google.cloud.environment_vars import PUBSUB_EMULATOR +from run_system_test import run_module_tests + + +BIGTABLE = 'bigtable' +DATASTORE = 'datastore' +PUBSUB = 'pubsub' +PACKAGE_INFO = { + BIGTABLE: (BIGTABLE_EMULATOR,), + DATASTORE: (GCD_DATASET, GCD_HOST), + PUBSUB: (PUBSUB_EMULATOR,), +} +EXTRA = { + DATASTORE: ('--no-legacy',), +} +_DS_READY_LINE = '[datastore] Dev App Server is now running.\n' +_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on ' +_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on ' + + +def get_parser(): + """Get simple ``argparse`` parser to determine package. + + :rtype: :class:`argparse.ArgumentParser` + :returns: The parser for this script. 
+ """ + parser = argparse.ArgumentParser( + description='Run google-cloud system tests against local emulator.') + parser.add_argument('--package', dest='package', + choices=sorted(PACKAGE_INFO.keys()), + default=DATASTORE, help='Package to be tested.') + return parser + + +def get_start_command(package): + """Get command line arguments for starting emulator. + + :type package: str + :param package: The package to start an emulator for. + + :rtype: tuple + :returns: The arguments to be used, in a tuple. + """ + result = ('gcloud', 'beta', 'emulators', package, 'start') + extra = EXTRA.get(package, ()) + return result + extra + + +def get_env_init_command(package): + """Get command line arguments for getting emulator env. info. + + :type package: str + :param package: The package to get environment info for. + + :rtype: tuple + :returns: The arguments to be used, in a tuple. + """ + result = ('gcloud', 'beta', 'emulators', package, 'env-init') + extra = EXTRA.get(package, ()) + return result + extra + + +def datastore_wait_ready(popen): + """Wait until the datastore emulator is ready to use. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + """ + emulator_ready = False + while not emulator_ready: + emulator_ready = popen.stderr.readline() == _DS_READY_LINE + + +def wait_ready_prefix(popen, prefix): + """Wait until the a process encounters a line with matching prefix. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + + :type prefix: str + :param prefix: The prefix to match + """ + emulator_ready = False + while not emulator_ready: + emulator_ready = popen.stderr.readline().startswith(prefix) + + +def wait_ready(package, popen): + """Wait until the emulator is ready to use. + + :type package: str + :param package: The package to check if ready. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + + :raises: :class:`KeyError` if the ``package`` is not among + ``datastore``, ``pubsub`` or ``bigtable``. + """ + if package == DATASTORE: + datastore_wait_ready(popen) + elif package == PUBSUB: + wait_ready_prefix(popen, _PS_READY_LINE_PREFIX) + elif package == BIGTABLE: + wait_ready_prefix(popen, _BT_READY_LINE_PREFIX) + else: + raise KeyError('Package not supported', package) + + +def cleanup(pid): + """Cleanup a process (including all of its children). + + :type pid: int + :param pid: Process ID. + """ + proc = psutil.Process(pid) + for child_proc in proc.children(recursive=True): + try: + child_proc.kill() + child_proc.terminate() + except psutil.NoSuchProcess: + pass + proc.terminate() + proc.kill() + + +def run_tests_in_emulator(package): + """Spawn an emulator instance and run the system tests. + + :type package: str + :param package: The package to run system tests against. + """ + # Make sure this package has environment vars to replace. + env_vars = PACKAGE_INFO[package] + + start_command = get_start_command(package) + # Ignore stdin and stdout, don't pollute the user's output with them. 
+    proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE)
+    try:
+        wait_ready(package, proc_start)
+        env_init_command = get_env_init_command(package)
+        proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE,
+                                    stderr=subprocess.PIPE)
+        env_status = proc_env.wait()
+        if env_status != 0:
+            raise RuntimeError(env_status, proc_env.stderr.read())
+        env_lines = proc_env.stdout.read().strip().split('\n')
+        # Set environment variables before running the system tests.
+        for env_var in env_vars:
+            line_prefix = 'export ' + env_var + '='
+            value, = [line.split(line_prefix, 1)[1] for line in env_lines
+                      if line.startswith(line_prefix)]
+            os.environ[env_var] = value
+        run_module_tests(package, ignore_requirements=True)
+    finally:
+        cleanup(proc_start.pid)
+
+
+def main():
+    """Main method to run this script."""
+    parser = get_parser()
+    args = parser.parse_args()
+    run_tests_in_emulator(args.package)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/packages/google-cloud-datastore/test_utils/scripts/update_docs.sh b/packages/google-cloud-datastore/test_utils/scripts/update_docs.sh
new file mode 100755
index 000000000000..8cbab9f0dad0
--- /dev/null
+++ b/packages/google-cloud-datastore/test_utils/scripts/update_docs.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ev
+
+GH_OWNER='GoogleCloudPlatform'
+GH_PROJECT_NAME='google-cloud-python'
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# Function to build the docs.
+function build_docs {
+    rm -rf docs/_build/
+    rm -f docs/bigquery/generated/*.rst
+    # -W -> warnings as errors
+    # -T -> show full traceback on exception
+    # -N -> no color
+    sphinx-build \
+        -W -T -N \
+        -b html \
+        -d docs/_build/doctrees \
+        docs/ \
+        docs/_build/html/
+    return $?
+}
+
+# Only update docs on a merged commit on CircleCI, or on Kokoro.
+if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then
+    echo "Building new docs on a merged commit."
+elif [[ "$1" == "kokoro" ]]; then
+    echo "Building and publishing docs on Kokoro."
+elif [[ -n "${CIRCLE_TAG}" ]]; then
+    echo "Building new docs on a tag (but will not deploy)."
+    build_docs
+    exit $?
+else
+    echo "Not on master nor a release tag."
+    echo "Building new docs for testing purposes, but not deploying."
+    build_docs
+    exit $?
+fi
+
+# Adding GitHub pages branch. `git submodule add` checks it
+# out at HEAD.
+GH_PAGES_DIR='ghpages'
+git submodule add -q -b gh-pages \
+    "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR}
+
+# Determine if we are building a new tag or are building docs
+# for master. Then build new docs in docs/_build from master.
+if [[ -n "${CIRCLE_TAG}" ]]; then
+    # Sphinx will use the package version by default.
+    build_docs
+else
+    SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs
+fi
+
+# Update gh-pages with the created docs.
+cd ${GH_PAGES_DIR}
+git rm -fr latest/
+cp -R ../docs/_build/html/ latest/
+
+# Stage the updated files to push to gh-pages.
+git add .
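+# ('git status' below is informational; the porcelain check that follows
+# decides whether there is anything to commit and push.)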
+git status
+
+# If there are no changes, just exit cleanly.
+if [[ -z "$(git status --porcelain)" ]]; then
+    echo "Nothing to commit. Exiting without pushing changes."
+    exit
+fi
+
+# Commit to gh-pages branch to apply changes.
+git config --global user.email "dpebot@google.com"
+git config --global user.name "dpebot"
+git commit -m "Update docs after merge to master."
+
+# NOTE: This may fail if two docs updates (on merges to master)
+# happen in close proximity.
+git push -q origin HEAD:gh-pages
diff --git a/packages/google-cloud-datastore/test_utils/setup.py b/packages/google-cloud-datastore/test_utils/setup.py
new file mode 100644
index 000000000000..8e9222a7f862
--- /dev/null
+++ b/packages/google-cloud-datastore/test_utils/setup.py
@@ -0,0 +1,64 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from setuptools import find_packages
+from setuptools import setup
+
+
+PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
+
+
+# NOTE: This is duplicated throughout and we should try to
+# consolidate.
+SETUP_BASE = {
+    'author': 'Google Cloud Platform',
+    'author_email': 'googleapis-publisher@google.com',
+    'scripts': [],
+    'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python',
+    'license': 'Apache 2.0',
+    'platforms': 'Posix; MacOS X; Windows',
+    'include_package_data': True,
+    'zip_safe': False,
+    'classifiers': [
+        'Development Status :: 4 - Beta',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: Apache Software License',
+        'Operating System :: OS Independent',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3.7',
+        'Topic :: Internet',
+    ],
+}
+
+
+REQUIREMENTS = [
+    'google-auth >= 0.4.0',
+    'six',
+]
+
+setup(
+    name='google-cloud-testutils',
+    version='0.24.0',
+    description='System test utilities for google-cloud-python',
+    packages=find_packages(),
+    install_requires=REQUIREMENTS,
+    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
+    **SETUP_BASE
+)
diff --git a/packages/google-cloud-datastore/test_utils/test_utils/__init__.py b/packages/google-cloud-datastore/test_utils/test_utils/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-datastore/test_utils/test_utils/imports.py b/packages/google-cloud-datastore/test_utils/test_utils/imports.py
new file mode 100644
index 000000000000..5991af7fc465
--- /dev/null
+++ b/packages/google-cloud-datastore/test_utils/test_utils/imports.py
@@ -0,0 +1,38 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import six
+
+
+def maybe_fail_import(predicate):
+    """Create and return a patcher that conditionally makes an import fail.
+
+    Args:
+        predicate (Callable[[...], bool]): A callable that, if it returns `True`,
+            triggers an `ImportError`. It must accept the same arguments as the
+            built-in `__import__` function.
+            https://docs.python.org/3/library/functions.html#__import__
+
+    Returns:
+        A mock patcher object that can be used to enable patched import behavior.
+    """
+    orig_import = six.moves.builtins.__import__
+
+    def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
+        if predicate(name, globals, locals, fromlist, level):
+            raise ImportError
+        return orig_import(name, globals, locals, fromlist, level)
+
+    return mock.patch.object(six.moves.builtins, "__import__", new=custom_import)
diff --git a/packages/google-cloud-datastore/test_utils/test_utils/retry.py b/packages/google-cloud-datastore/test_utils/test_utils/retry.py
new file mode 100644
index 000000000000..e61c001a03e1
--- /dev/null
+++ b/packages/google-cloud-datastore/test_utils/test_utils/retry.py
@@ -0,0 +1,207 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from functools import wraps
+
+import six
+
+MAX_TRIES = 4
+DELAY = 1
+BACKOFF = 2
+
+
+def _retry_all(_):
+    """Retry all caught exceptions."""
+    return True
+
+
+class BackoffFailed(Exception):
+    """Retry with backoff did not complete successfully."""
+
+
+class RetryBase(object):
+    """Base class for retrying a decorated function with exponential backoff.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier, e.g. a value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        self.max_tries = max_tries
+        self.delay = delay
+        self.backoff = backoff
+        self.logger = logger.warning if logger else six.print_
+
+
+class RetryErrors(RetryBase):
+    """Decorator for retrying given exceptions in testing.
+
+    :type exception: Exception or tuple of Exceptions
+    :param exception: The exception type to check for, or a tuple of
+                      exception types to check.
+
+    :type error_predicate: function, takes caught exception, returns bool
+    :param error_predicate: Predicate evaluating whether to retry after a
+                            caught exception.
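+                            For example (an illustrative predicate), to
+                            retry only on HTTP 503s:
+                            ``lambda exc: getattr(exc, 'code', None) == 503``.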
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier, e.g. a value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, exception, error_predicate=_retry_all,
+                 max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        super(RetryErrors, self).__init__(max_tries, delay, backoff, logger)
+        self.exception = exception
+        self.error_predicate = error_predicate
+
+    def __call__(self, to_wrap):
+        @wraps(to_wrap)
+        def wrapped_function(*args, **kwargs):
+            tries = 0
+            while tries < self.max_tries:
+                try:
+                    return to_wrap(*args, **kwargs)
+                except self.exception as caught_exception:
+
+                    if not self.error_predicate(caught_exception):
+                        raise
+
+                    delay = self.delay * self.backoff**tries
+                    msg = ("%s. Trying again in %d seconds..." %
+                           (caught_exception, delay))
+                    self.logger(msg)
+
+                    time.sleep(delay)
+                    tries += 1
+            # One final attempt, letting any exception propagate.
+            return to_wrap(*args, **kwargs)
+
+        return wrapped_function
+
+
+class RetryResult(RetryBase):
+    """Decorator for retrying based on non-error result.
+
+    :type result_predicate: function, takes result, returns bool
+    :param result_predicate: Predicate evaluating whether to retry after a
+                             result is returned.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier, e.g. a value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, result_predicate,
+                 max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        super(RetryResult, self).__init__(max_tries, delay, backoff, logger)
+        self.result_predicate = result_predicate
+
+    def __call__(self, to_wrap):
+        @wraps(to_wrap)
+        def wrapped_function(*args, **kwargs):
+            tries = 0
+            while tries < self.max_tries:
+                result = to_wrap(*args, **kwargs)
+                if self.result_predicate(result):
+                    return result
+
+                delay = self.delay * self.backoff**tries
+                msg = "%s. Trying again in %d seconds..." % (
+                    self.result_predicate.__name__, delay,)
+                self.logger(msg)
+
+                time.sleep(delay)
+                tries += 1
+            raise BackoffFailed()
+
+        return wrapped_function
+
+
+class RetryInstanceState(RetryBase):
+    """Decorator for retrying based on instance state.
+
+    :type instance_predicate: function, takes instance, returns bool
+    :param instance_predicate: Predicate evaluating whether to retry after an
+                               API-invoking method is called.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier, e.g. a value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
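+
+    Example (a sketch; the ``status`` attribute and ``reload`` method are
+    hypothetical)::
+
+        check_ready = RetryInstanceState(lambda inst: inst.status == 'READY')
+        check_ready(instance.reload)()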
+ """ + def __init__(self, instance_predicate, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryInstanceState, self).__init__( + max_tries, delay, backoff, logger) + self.instance_predicate = instance_predicate + + def __call__(self, to_wrap): + instance = to_wrap.__self__ # only instance methods allowed + + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + result = to_wrap(*args, **kwargs) + if self.instance_predicate(instance): + return result + + delay = self.delay * self.backoff**tries + msg = "%s. Trying again in %d seconds..." % ( + self.instance_predicate.__name__, delay,) + self.logger(msg) + + time.sleep(delay) + tries += 1 + raise BackoffFailed() + + return wrapped_function diff --git a/packages/google-cloud-datastore/test_utils/test_utils/system.py b/packages/google-cloud-datastore/test_utils/test_utils/system.py new file mode 100644 index 000000000000..590dc62a06e6 --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/test_utils/system.py @@ -0,0 +1,81 @@ +# Copyright 2014 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function +import os +import sys +import time + +import google.auth.credentials +from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS + + +# From shell environ. May be None. +CREDENTIALS = os.getenv(TEST_CREDENTIALS) + +ENVIRON_ERROR_MSG = """\ +To run the system tests, you need to set some environment variables. +Please check the CONTRIBUTING guide for instructions. +""" + + +class EmulatorCreds(google.auth.credentials.Credentials): + """A mock credential object. + + Used to avoid unnecessary token refreshing or reliance on the network + while an emulator is running. + """ + + def __init__(self): # pylint: disable=super-init-not-called + self.token = b'seekrit' + self.expiry = None + + @property + def valid(self): + """Would-be validity check of the credentials. + + Always is :data:`True`. + """ + return True + + def refresh(self, unused_request): # pylint: disable=unused-argument + """Off-limits implementation for abstract method.""" + raise RuntimeError('Should never be refreshed.') + + +def check_environ(): + err_msg = None + if CREDENTIALS is None: + err_msg = '\nMissing variables: ' + TEST_CREDENTIALS + elif not os.path.isfile(CREDENTIALS): + err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS, + CREDENTIALS) + + if err_msg is not None: + msg = ENVIRON_ERROR_MSG + err_msg + print(msg, file=sys.stderr) + sys.exit(1) + + +def unique_resource_id(delimiter='_'): + """A unique identifier for a resource. + + Intended to help locate resources created in particular + testing environments and at particular times. 
+ """ + build_id = os.getenv('CIRCLE_BUILD_NUM', '') + if build_id == '': + return '%s%d' % (delimiter, 1000 * time.time()) + else: + return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time()) diff --git a/packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py b/packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py new file mode 100644 index 000000000000..36b15d6be991 --- /dev/null +++ b/packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + + +INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" +PROJECT_INSIDE_ENVVAR = "PROJECT_ID" +PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" +BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" + + +class VPCSCTestConfig(object): + """System test utility for VPCSC detection. + + See: https://cloud.google.com/vpc-service-controls/docs/ + """ + + @property + def inside_vpcsc(self): + """Test whether the test environment is configured to run inside VPCSC. + + Returns: + bool: + true if the environment is configured to run inside VPCSC, + else false. + """ + return INSIDE_VPCSC_ENVVAR in os.environ + + @property + def project_inside(self): + """Project ID for testing outside access. + + Returns: + str: project ID used for testing outside access; None if undefined. + """ + return os.environ.get(PROJECT_INSIDE_ENVVAR, None) + + @property + def project_outside(self): + """Project ID for testing inside access. + + Returns: + str: project ID used for testing inside access; None if undefined. + """ + return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) + + @property + def bucket_outside(self): + """GCS bucket for testing inside access. + + Returns: + str: bucket ID used for testing inside access; None if undefined. + """ + return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) + + def skip_if_inside_vpcsc(self, testcase): + """Test decorator: skip if running inside VPCSC.""" + reason = ( + "Running inside VPCSC. " + "Unset the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_vpcsc(self, testcase): + """Test decorator: skip if running outside VPCSC.""" + reason = ( + "Running outside VPCSC. " + "Set the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_project(self, testcase): + """Test decorator: skip if inside project env var not set.""" + reason = ( + "Project ID for running inside VPCSC not set. " + "Set the {} environment variable to enable this test." 
+        ).format(PROJECT_INSIDE_ENVVAR)
+        skip = pytest.mark.skipif(self.project_inside is None, reason=reason)
+        return skip(testcase)
+
+    def skip_unless_outside_project(self, testcase):
+        """Test decorator: skip if outside project env var not set."""
+        reason = (
+            "Project ID for running outside VPCSC not set. "
+            "Set the {} environment variable to enable this test."
+        ).format(PROJECT_OUTSIDE_ENVVAR)
+        skip = pytest.mark.skipif(self.project_outside is None, reason=reason)
+        return skip(testcase)
+
+    def skip_unless_outside_bucket(self, testcase):
+        """Test decorator: skip if outside bucket env var not set."""
+        reason = (
+            "Bucket ID for running outside VPCSC not set. "
+            "Set the {} environment variable to enable this test."
+        ).format(BUCKET_OUTSIDE_ENVVAR)
+        skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason)
+        return skip(testcase)
+
+
+vpcsc_config = VPCSCTestConfig()

From 8b4f1f1c25b962e99708bcac2d64b650d8ce4475 Mon Sep 17 00:00:00 2001
From: Chris Wilcox
Date: Thu, 6 Feb 2020 23:25:43 +0000
Subject: [PATCH 252/611] fix: install test_utils during system tests

---
 packages/google-cloud-datastore/noxfile.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py
index 7fbb1febff8f..1629721929f6 100644
--- a/packages/google-cloud-datastore/noxfile.py
+++ b/packages/google-cloud-datastore/noxfile.py
@@ -113,6 +113,7 @@ def system(session):

     session.install("mock", "pytest")
     session.install("-e", ".")
+    session.install("-e", "test_utils")

     # Run py.test against the system tests.
     if system_test_exists:

From 878bae65beccfc8062a386145e5009004a54ae28 Mon Sep 17 00:00:00 2001
From: Chris Wilcox
Date: Thu, 6 Feb 2020 23:55:52 +0000
Subject: [PATCH 253/611] fix: update synth to have test_utils

---
 packages/google-cloud-datastore/noxfile.py     | 2 +-
 packages/google-cloud-datastore/synth.metadata | 8 ++++----
 packages/google-cloud-datastore/synth.py       | 4 ++--
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py
index 1629721929f6..b090f01fc853 100644
--- a/packages/google-cloud-datastore/noxfile.py
+++ b/packages/google-cloud-datastore/noxfile.py
@@ -130,7 +130,7 @@ def cover(session):
     test runs (not system test runs), and then erases coverage data.
     """
     session.install("coverage", "pytest-cov")
-    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+    session.run("coverage", "report", "--show-missing", "--fail-under=99")
     session.run("coverage", "erase")
diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata
index 4fbc8e273696..4975d919a966 100644
--- a/packages/google-cloud-datastore/synth.metadata
+++ b/packages/google-cloud-datastore/synth.metadata
@@ -1,5 +1,5 @@
 {
-  "updateTime": "2020-02-06T22:53:26.159627Z",
+  "updateTime": "2020-02-06T23:50:52.082564Z",
   "sources": [
     {
       "generator": {
@@ -12,9 +12,9 @@
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "2e23b8fbc45f5d9e200572ca662fe1271bcd6760",
-        "internalRef": "293666452",
-        "log": "2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API.
It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\na8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry 
config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\nc1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n08b488e0660c59842a7dee0e3e2b65d9e3a514a9\nExposing cloud_catalog.proto (This API is already available through REST)\n\nPiperOrigin-RevId: 288625007\n\na613482977e11ac09fa47687a5d1b5a01efcf794\nUpdate the OS Login v1beta API description to render better in the UI.\n\nPiperOrigin-RevId: 288547940\n\n5e182b8d9943f1b17008d69d4c7e865dc83641a7\nUpdate the OS Login API description to render better in the UI.\n\nPiperOrigin-RevId: 288546443\n\ncb79155f596e0396dd900da93872be7066f6340d\nFix: Add a resource annotation for Agent\nFix: Correct the service name in annotations for Intent and SessionEntityType\n\nPiperOrigin-RevId: 288441307\n\nf7f6e9daec3315fd47cb638789bd8415bf4a27cc\nAdded cloud asset api v1p1beta1\n\nPiperOrigin-RevId: 288427239\n\nf2880f5b342c6345f3dcaad24fcb3c6ca9483654\nBilling account API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 288351810\n\ndc250ffe071729f8f8bef9d6fd0fbbeb0254c666\nFix: Remove incorrect resource annotations in requests\n\nPiperOrigin-RevId: 288321208\n\n91ef2d9dd69807b0b79555f22566fb2d81e49ff9\nAdd GAPIC annotations to Cloud KMS (but do not migrate the GAPIC config yet).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 287999179\n\n4d45a6399e9444fbddaeb1c86aabfde210723714\nRefreshing Cloud Billing API protos.\n\nThis exposes the following API methods:\n- UpdateBillingAccount\n- CreateBillingAccount\n- GetIamPolicy\n- SetIamPolicy\n- TestIamPermissions\n\nThere are also some new fields to support the management of sub-accounts.\n\nPiperOrigin-RevId: 287908369\n\nec285d3d230810147ebbf8d5b691ee90320c6d2d\nHide not yet implemented update_transforms message\n\nPiperOrigin-RevId: 287608953\n\na202fb3b91cd0e4231be878b0348afd17067cbe2\nBigQuery Storage Write API v1alpha2 clients. 
The service is enabled by whitelist only.\n\nPiperOrigin-RevId: 287379998\n\n650d7f1f8adb0cfaf37b3ce2241c3168f24efd4d\nUpdate Readme.md to match latest Bazel updates\n090d98aea20270e3be4b64240775588f7ce50ff8\ndocs(bigtable): Fix library release level listed in generated documentation\n\nPiperOrigin-RevId: 287308849\n\n2c28f646ca77b1d57550368be22aa388adde2e66\nfirestore: retry reads that fail with contention\n\nPiperOrigin-RevId: 287250665\n\nfd3091fbe9b2083cabc53dc50c78035658bfc4eb\nSync timeout in grpc config back to 10s for tasks API with github googelapis gapic config.\n\nPiperOrigin-RevId: 287207067\n\n49dd7d856a6f77c0cf7e5cb3334423e5089a9e8a\nbazel: Integrate bazel-2.0.0 compatibility fixes\n\nPiperOrigin-RevId: 287205644\n\n46e52fd64973e815cae61e78b14608fe7aa7b1df\nbazel: Integrate bazel build file generator\n\nTo generate/update BUILD.bazel files for any particular client or a batch of clients:\n```\nbazel run //:build_gen -- --src=google/example/library\n```\n\nPiperOrigin-RevId: 286958627\n\n1a380ea21dea9b6ac6ad28c60ad96d9d73574e19\nBigQuery Storage Read API v1beta2 clients.\n\nPiperOrigin-RevId: 286616241\n\n5f3f1d0f1c06b6475a17d995e4f7a436ca67ec9e\nAdd Artman config for secretmanager.\n\nPiperOrigin-RevId: 286598440\n\n50af0530730348f1e3697bf3c70261f7daaf2981\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 286491002\n\n91818800384f4ed26961aea268910b1a2ec58cc8\nFor Data Catalog API,\n1. Add support for marking a tag template field as required when creating a new tag template.\n2. Add support for updating a tag template field from required to optional.\n\nPiperOrigin-RevId: 286490262\n\nff4a2047b3d66f38c9b22197c370ed0d02fc0238\nWeekly library update.\n\nPiperOrigin-RevId: 286484215\n\n192c14029861752a911ed434fd6ee5b850517cd9\nWeekly library update.\n\nPiperOrigin-RevId: 286484165\n\nd9e328eaf790d4e4346fbbf32858160f497a03e0\nFix bazel build (versions 1.x)\n\nBump gapic-generator and resource names plugins to the latest version.\n\nPiperOrigin-RevId: 286469287\n\n0ca305403dcc50e31ad9477c9b6241ddfd2056af\nsecretmanager client package name option updates for java and go\n\nPiperOrigin-RevId: 286439553\n\nade4803e8a1a9e3efd249c8c86895d2f12eb2aaa\niam credentials: publish v1 protos containing annotations\n\nPiperOrigin-RevId: 286418383\n\n03e5708e5f8d1909dcb74b25520309e59ebf24be\nsecuritycenter: add missing proto deps for Bazel build\n\nPiperOrigin-RevId: 286417075\n\n8b991eb3eb82483b0ca1f1361a9c8e5b375c4747\nAdd secretmanager client package name options.\n\nPiperOrigin-RevId: 286415883\n\nd400cb8d45df5b2ae796b909f098a215b2275c1d\ndialogflow: add operation_info annotations to BatchUpdateEntities and BatchDeleteEntities.\n\nPiperOrigin-RevId: 286312673\n\nf2b25232db397ebd4f67eb901a2a4bc99f7cc4c6\nIncreased the default timeout time for all the Cloud Security Command Center client libraries.\n\nPiperOrigin-RevId: 286263771\n\ncb2f1eefd684c7efd56fd375cde8d4084a20439e\nExposing new Resource fields in the SecurityCenterProperties proto, added more comments to the filter logic for these Resource fields, and updated the response proto for the ListFindings API with the new Resource fields.\n\nPiperOrigin-RevId: 286263092\n\n73cebb20432b387c3d8879bb161b517d60cf2552\nUpdate v1beta2 clusters and jobs to include resource ids in GRPC header.\n\nPiperOrigin-RevId: 286261392\n\n1b4e453d51c0bd77e7b73896cdd8357d62768d83\nsecuritycenter: publish v1beta1 protos with annotations\n\nPiperOrigin-RevId: 286228860\n\na985eeda90ae98e8519d2320bee4dec148eb8ccb\nAdd default retry configurations for 
speech_v1p1beta1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 286191318\n\n3352100a15ede383f5ab3c34599f7a10a3d066fe\nMake importing rule with the same name (but different aliases) from different repositories possible.\n\nThis is needed to allow monolitic gapic-generator and microgenerators coexist during transition period.\n\nTo plug a microgenerator:\n\n1) Add corresponding rules bidnings under `switched_rules_by_language` in repository_rules.bzl:\n rules[\"go_gapic_library2\"] = _switch(\n go and grpc and gapic,\n \"@gapic_generator_go//rules_go_gapic/go_gapic.bzl\",\n \"go_gapic_library\",\n )\n\n2) Import microgenerator in WORKSPACE (the above example assumes that the generator was imported under name \"gapic_generator_go\").\n\n3) To migrate an API from monolith to micro generator (this is done per API and per language) modify the corresponding load statement in the API's BUILD.bazel file. For example, for the example above, to migrate to go microgenerator modify the go-specific load statement in BUILD.bazel file of a specific API (which you want to migrate) to the following:\n\nload(\n \"@com_google_googleapis_imports//:imports.bzl\",\n \"go_gapic_assembly_pkg\",\n go_gapic_library = \"go_gapic_library2\",\n \"go_proto_library\",\n \"go_test\",\n)\n\nPiperOrigin-RevId: 286065440\n\n6ad2bb13bc4b0f3f785517f0563118f6ca52ddfd\nUpdated v1beta1 protos for the client:\n- added support for GenericSignedAttestation which has a generic Signature\n- added support for CVSSv3 and WindowsDetail in Vulnerability\n- documentation updates\n\nPiperOrigin-RevId: 286008145\n\nfe1962e49999a832eed8162c45f23096336a9ced\nAdMob API v1 20191210\n\nBasic account info, mediation and network report available. See https://developers.google.com/admob/api/release-notes for more details.\n\nPiperOrigin-RevId: 285894502\n\n41fc1403738b61427f3a798ca9750ef47eb9c0f2\nAnnotate the required fields for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285824386\n\n27d0e0f202cbe91bf155fcf36824a87a5764ef1e\nRemove inappropriate resource_reference annotations for UpdateWorkflowTemplateRequest.template.\n\nPiperOrigin-RevId: 285802643\n\ne5c4d3a2b5b5bef0a30df39ebb27711dc98dee64\nAdd Artman BUILD.bazel file for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285445602\n\n2085a0d3c76180ee843cf2ecef2b94ca5266be31\nFix path in the artman config for Monitoring Dashboard API.\n\nPiperOrigin-RevId: 285233245\n\n2da72dfe71e4cca80902f9e3e125c40f02c2925b\nAdd Artman and GAPIC configs for the Monitoring Dashboards API.\n\nPiperOrigin-RevId: 285211544\n\n9f6eeebf1f30f51ffa02acea5a71680fe592348e\nAdd annotations to Dataproc v1. 
(Also forwarding comment changes from internal source control.)\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 285197557\n\n19c4589a3cb44b3679f7b3fba88365b3d055d5f8\noslogin: fix v1beta retry configuration\n\nPiperOrigin-RevId: 285013366\n\nee3f02926d0f8a0bc13f8d716581aad20f575751\nAdd Monitoring Dashboards API protocol buffers to Google Cloud Monitoring API.\n\nPiperOrigin-RevId: 284982647\n\ne47fdd266542386e5e7346697f90476e96dc7ee8\nbigquery datatransfer: Remove non-publicly available DataSourceService.\n\nPiperOrigin-RevId: 284822593\n\n6156f433fd1d9d5e4a448d6c6da7f637921d92ea\nAdds OSConfig v1beta protos and initial client library config\n\nPiperOrigin-RevId: 284799663\n\n6cc9499e225a4f6a5e34fe07e390f67055d7991c\nAdd datetime.proto to google/type/BUILD.bazel\n\nPiperOrigin-RevId: 284643689\n\nfe7dd5277e39ffe0075729c61e8d118d7527946d\nCosmetic changes to proto comment as part of testing internal release instructions.\n\nPiperOrigin-RevId: 284608712\n\n68d109adad726b89f74276d2f4b2ba6aac6ec04a\nAdd annotations to securitycenter v1, but leave GAPIC v1 in place.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 284580511\n\ndf8a1707a910fc17c71407a75547992fd1864c51\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 284568564\n\na69a974976221ce3bb944901b739418b85d6408c\nclient library update\n\nPiperOrigin-RevId: 284463979\n\na4adac3a12aca6e3a792c9c35ee850435fe7cf7e\nAdded DateTime, TimeZone, and Month proto files to google/type\n\nPiperOrigin-RevId: 284277770\n\ned5dec392906078db4f7745fe4f11d34dd401ae9\nchange common resources from message-level annotations to file-level annotations.\n\nPiperOrigin-RevId: 284236794\n\na00e2c575ef1b637667b4ebe96b8c228b2ddb273\nbigquerydatatransfer: change resource type TransferRun to Run to be consistent with gapic configs\nbigquerydatatransfer: add missing patterns for DataSource, TransferConfig and Run (to allow the location segment)\nbigquerydatatransfer: add file-level Parent resource type (to allow the location segement)\nbigquerydatatransfer: update grpc service config with correct retry delays\n\nPiperOrigin-RevId: 284234378\n\nb10e4547017ca529ac8d183e839f3c272e1c13de\ncloud asset: replace required fields for batchgetassethistory. Correct the time out duration.\n\nPiperOrigin-RevId: 284059574\n\n6690161e3dcc3367639a2ec10db67bf1cf392550\nAdd default retry configurations for speech_v1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. 
See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 284035915\n\n9b2635ef91e114f0357bdb87652c26a8f59316d5\ncloudtasks: fix gapic v2 config\n\nPiperOrigin-RevId: 284020555\n\ne5676ba8b863951a8ed0bfd6046e1db38062743c\nReinstate resource name handling in GAPIC config for Asset v1.\n\nPiperOrigin-RevId: 283993903\n\nf337f7fb702c85833b7b6ca56afaf9a1bf32c096\nOSConfig AgentEndpoint: add LookupEffectiveGuestPolicy rpc\n\nPiperOrigin-RevId: 283989762\n\nc0ac9b55f2e2efd0ee525b3a6591a1b09330e55a\nInclude real time feed api into v1 version\n\nPiperOrigin-RevId: 283845474\n\n2427a3a0f6f4222315362d973d91a082a3a884a7\nfirestore admin: update v1 protos with annotations & retry config\n\nPiperOrigin-RevId: 283826605\n\n555e844dbe04af50a8f55fe1217fa9d39a0a80b2\nchore: publish retry configs for iam admin, cloud asset, and remoteworkers\n\nPiperOrigin-RevId: 283801979\n\n6311dc536668849142d1fe5cd9fc46da66d1f77f\nfirestore: update v1beta1 protos with annotations and retry config\n\nPiperOrigin-RevId: 283794315\n\nda0edeeef953b05eb1524d514d2e9842ac2df0fd\nfeat: publish several retry config files for client generation\n\nPiperOrigin-RevId: 283614497\n\n59a78053537e06190f02d0a7ffb792c34e185c5a\nRemoving TODO comment\n\nPiperOrigin-RevId: 283592535\n\n8463992271d162e2aff1d5da5b78db11f2fb5632\nFix bazel build\n\nPiperOrigin-RevId: 283589351\n\n3bfcb3d8df10dfdba58f864d3bdb8ccd69364669\nPublic client library for bebop_jobs_api_20191118_1_RC3 release.\n\nPiperOrigin-RevId: 283568877\n\n27ab0db61021d267c452b34d149161a7bf0d9f57\nfirestore: publish annotated protos and new retry config\n\nPiperOrigin-RevId: 283565148\n\n38dc36a2a43cbab4a2a9183a43dd0441670098a9\nfeat: add http annotations for operations calls\n\nPiperOrigin-RevId: 283384331\n\n366caab94906975af0e17822e372f1d34e319d51\ndatastore: add a legacy artman config for PHP generation\n\nPiperOrigin-RevId: 283378578\n\n82944da21578a53b74e547774cf62ed31a05b841\nMigrate container v1beta1 to GAPIC v2.\n\nPiperOrigin-RevId: 283342796\n\n584dcde5826dd11ebe222016b7b208a4e1196f4b\nRemove resource name annotation for UpdateKeyRequest.key, because it's the resource, not a name.\n\nPiperOrigin-RevId: 283167368\n\n6ab0171e3688bfdcf3dbc4056e2df6345e843565\nAdded resource annotation for Key message.\n\nPiperOrigin-RevId: 283066965\n\n86c1a2db1707a25cec7d92f8850cc915163ec3c3\nExpose Admin API methods for Key manipulation.\n\nPiperOrigin-RevId: 282988776\n\n3ddad085965896ffb205d44cb0c0616fe3def10b\nC++ targets: correct deps so they build, rename them from trace* to cloudtrace*\nto match the proto names.\n\nPiperOrigin-RevId: 282857635\n\ne9389365a971ad6457ceb9646c595e79dfdbdea5\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 282810797\n\ne42eaaa9abed3c4d63d64f790bd3191448dbbca6\nPut back C++ targets for cloud trace v2 api.\n\nPiperOrigin-RevId: 282803841\n\nd8896a3d8a191702a9e39f29cf4c2e16fa05f76d\nAdd initial BUILD.bazel for secretmanager.googleapis.com\n\nPiperOrigin-RevId: 282674885\n\n2cc56cb83ea3e59a6364e0392c29c9e23ad12c3a\nCreate sample for list recommendations\n\nPiperOrigin-RevId: 282665402\n\nf88e2ca65790e3b44bb3455e4779b41de1bf7136\nbump Go to ga\n\nPiperOrigin-RevId: 282651105\n\naac86d932b3cefd7d746f19def6935d16d6235e0\nDocumentation update. 
Add location_id in preparation for regionalization.\n\nPiperOrigin-RevId: 282586371\n\n5b501cd384f6b842486bd41acce77854876158e7\nMigrate Datastore Admin to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282570874\n\n6a16d474d5be201b20a27646e2009c4dfde30452\nMigrate Datastore to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282564329\n\n74bd9b95ac8c70b883814e4765a725cffe43d77c\nmark Go lib ga\n\nPiperOrigin-RevId: 282562558\n\nf7b3d434f44f6a77cf6c37cae5474048a0639298\nAdd secretmanager.googleapis.com protos\n\nPiperOrigin-RevId: 282546399\n\nc34a911aaa0660a45f5a556578f764f135e6e060\niot: bump Go GAPIC to GA release level\n\nPiperOrigin-RevId: 282494787\n\n79b7f1c5ba86859dbf70aa6cd546057c1002cdc0\nPut back C++ targets.\nPrevious change overrode custom C++ targets made by external teams. This PR puts those targets back.\n\nPiperOrigin-RevId: 282458292\n\n06a840781d2dc1b0a28e03e30fb4b1bfb0b29d1e\nPopulate BAZEL.build files for around 100 APIs (all APIs we publish) in all 7 langauges.\n\nPiperOrigin-RevId: 282449910\n\n777b580a046c4fa84a35e1d00658b71964120bb0\nCreate BUILD file for recommender v1beta1\n\nPiperOrigin-RevId: 282068850\n\n48b385b6ef71dfe2596490ea34c9a9a434e74243\nGenerate recommender v1beta1 gRPC ServiceConfig file\n\nPiperOrigin-RevId: 282067795\n\n8395b0f1435a4d7ce8737b3b55392627758bd20c\nfix: Set timeout to 25s, because Tasks fails for any deadline above 30s.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282017295\n\n3ba7ddc4b2acf532bdfb0004ca26311053c11c30\nfix: Shift Ruby and PHP to legacy GAPIC YAMLs for back-compat.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281852671\n\nad6f0c002194c3ec6c13d592d911d122d2293931\nRemove unneeded yaml files\n\nPiperOrigin-RevId: 281835839\n\n1f42588e4373750588152cdf6f747de1cadbcbef\nrefactor: Migrate Tasks beta 2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769558\n\n902b51f2073e9958a2aba441f7f7ac54ea00966d\nrefactor: Migrate Tasks to GAPIC v2 (for real this time).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769522\n\n17561f59970eede87f61ef6e9c322fa1198a2f4d\nMigrate Tasks Beta 3 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769519\n\nf95883b15a1ddd58eb7e3583fdefe7b00505faa3\nRegenerate recommender v1beta1 protos and sanitized yaml\n\nPiperOrigin-RevId: 281765245\n\n9a52df54c626b36699a058013d1735a166933167\nadd gRPC ServiceConfig for grafeas v1\n\nPiperOrigin-RevId: 281762754\n\n7a79d682ef40c5ca39c3fca1c0901a8e90021f8a\nfix: Roll back Tasks GAPIC v2 while we investigate C# issue.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281758548\n\n3fc31491640a90f029f284289e7e97f78f442233\nMigrate Tasks to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281751187\n\n5bc0fecee454f857cec042fb99fe2d22e1bff5bc\nfix: adds operation HTTP rules back to v1p1beta1 config\n\nPiperOrigin-RevId: 281635572\n\n5364a19284a1333b3ffe84e4e78a1919363d9f9c\nbazel: Fix build\n\n1) Update to latest gapic-generator (has iam resource names fix for java).\n2) Fix non-trivial issues with oslogin (resources defined in sibling package to the one they are used from) and monitoring.\n3) Fix trivial missing dependencies in proto_library targets for other apis.\n\nThis is to prepare the repository to being populated with BUILD.bazel files for all supported apis (101 API) in all 7 languages.\n\nPiperOrigin-RevId: 281618750\n\n0aa77cbe45538d5e5739eb637db3f2940b912789\nUpdating common proto files in google/type/ with their latest versions.\n\nPiperOrigin-RevId: 
281603926\n\nd47e1b4485b3effbb2298eb10dd13a544c0f66dc\nfix: replace Speech Recognize RPC retry_codes_name for non-standard assignment\n\nPiperOrigin-RevId: 281594037\n\n16543773103e2619d2b5f52456264de5bb9be104\nRegenerating public protos for datacatalog, also adding gRPC service config.\n\nPiperOrigin-RevId: 281423227\n\n328ebe76adb06128d12547ed70107fb841aebf4e\nChange custom data type from String to google.protobuf.Struct to be consistent with other docs such as\nhttps://developers.google.com/actions/smarthome/develop/process-intents#response_format\n\nPiperOrigin-RevId: 281402467\n\n5af83f47b9656261cafcf88b0b3334521ab266b3\n(internal change without visible public changes)\n\nPiperOrigin-RevId: 281334391\n\nc53ed56649583a149382bd88d3c427be475b91b6\nFix typo in protobuf docs.\n\nPiperOrigin-RevId: 281293109\n\nd8dd7fe8d5304f7bd1c52207703d7f27d5328c5a\nFix build by adding missing deps.\n\nPiperOrigin-RevId: 281088257\n\n3ef5ffd7351809d75c1332d2eaad1f24d9c318e4\nMigrate Error Reporting v1beta1 to proto annotations / GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281075722\n\n418ee8e24a56b5959e1c1defa4b6c97f883be379\nTrace v2: Add remaining proto annotations, migrate to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281068859\n\nc89394342a9ef70acaf73a6959e04b943fbc817b\nThis change updates an outdated comment for the feature importance proto field since they are no longer in [0, 1] for online predictions.\n\nPiperOrigin-RevId: 280761373\n\n1ec8b8e2c3c8f41d7d2b22c594c025276d6a4ae6\nCode refactoring\n\nPiperOrigin-RevId: 280760149\n\n427a22b04039f93b769d89accd6f487413f667c1\nImport automl operation protos.\n\nPiperOrigin-RevId: 280703572\n\n45749a04dac104e986f6cc47da3baf7c8bb6f9b0\nfix: bigqueryconnection_gapic.yaml to reflect proto annotations\n\n* remove connection_credential resource\n* make CreateCredentialRequest.connection_id optional\n* shuffle field ordering in CreateCredential flattening\n\nPiperOrigin-RevId: 280685438\n\n8385366aa1e5d7796793db02a9c5e167d1fd8f17\nRevert the Trace v2 GAPIC for now.\nCommitter: @lukesneeringer\n\nPiperOrigin-RevId: 280669295\n\n5c8ab2c072d557c2f4c4e54b544394e2d62202d5\nMigrate Trace v1 and Trace v2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 280667429\n\nf6808ff4e8b966cd571e99279d4a2780ed97dff2\nRename the `endpoint_urls` field to `endpoint_uris` to be consistent with\nGoogle API nomenclature.\n\nPiperOrigin-RevId: 280581337\n\n1935fb8889686f5c9d107f11b3c6870fc3aa7cdc\nComment updates\n\nPiperOrigin-RevId: 280451656\n\n0797fd5b9029d630e68a0899734715d62ad38e33\nComment updates\n\nPiperOrigin-RevId: 280451600\n\n9bc8d07b8b749e791d16c8d559526928ceaf1994\nRollback of \"Migrate Cloud Error Reporting to proto annotations & GAPIC v2.\"\n\nPiperOrigin-RevId: 280445975\n\nf8720321aecf4aab42e03602ac2c67f9777d9170\nfix: bigtable retry config in GAPIC v2\n\nPiperOrigin-RevId: 280434856\n\nb11664ba64f92d96d748e0dd9724d006dcafd120\nMigrate Cloud Error Reporting to proto annotations & GAPIC v2.\n\nPiperOrigin-RevId: 280432937\n\n4f747bda9b099b4426f495985680d16d0227fa5f\n1. Change DataCatalog package name in java from com.google.cloud.datacatalog to com.google.cloud.datacatalog.v1beta1 (API version is included in the package). *This is a breaking change.*\n\n2. Add API for Taxonomies (PolicyTagManager and PolicyTagManagerSerialization services).\n\n3. 
Minor changes to documentation.\n\nPiperOrigin-RevId: 280394936\n\nbc76ffd87360ce1cd34e3a6eac28afd5e1efda76\nUse rules_proto bzl files to load proto_library\n\nThis makes googleapis forward compatible with Bazel incompatible change https://github.com/bazelbuild/bazel/issues/8922.\n\nThis CL was created by adding @rules_proto to the WORKSPACE file and then running:\n\nfind . -name BUILD.bazel | \\\n while read build; do \\\n buildifier --lint=fix --warnings=load $build; \\\n done\n\nSince buildifier cannot be told not to reformat the BUILD file, some files are reformatted.\n\nPiperOrigin-RevId: 280356106\n\n218164b3deba1075979c9dca5f71461379e42dd1\nMake the `permissions` argument in TestIamPermissions required.\n\nPiperOrigin-RevId: 280279014\n\ndec8fd8ea5dc464496606189ba4b8949188639c8\nUpdating Cloud Billing Budget API documentation for clarity.\n\nPiperOrigin-RevId: 280225437\n\na667ffab90deb5e2669eb40ec7b61ec96a3d0454\nIntroduced detailed status message for CreateTimeSeries: CreateTimeSeriesSummary replaces CreateTimeSeriesError, which is now deprecated and unused.\n\nPiperOrigin-RevId: 280221707\n\nbe0a25eceec8916633447a37af0ecea801b85186\nMigrate Bigtable API to GAPIC v2 config.\n\nPiperOrigin-RevId: 280199643\n\n88bbf96b90089994ed16208a0f38cdd07f743742\nFix location of monitoring.yaml in Artman config for monitoring v3.\n\nPiperOrigin-RevId: 280134477\n\ndbaa01a20303758eed0c5a95ad2239ea306ad9a5\nUpdate namespace for PHP.\n\nPiperOrigin-RevId: 280085199\n\nf73b3796a635b2026a590d5133af7fa1f0eb807b\nStandardize pub/sub client default settings across clients:\n- Add retry codes for streaming pull\n- Decrease publish's max_rpc_timeout (mini-timeout) from 10 mins to 1 min\n- Decrease publish's total timeout from 10 mins to 1 min\n- Increase publish batching threshold from 10 to 100 elements\n- Increase publish batching size threshold from 1 KiB to 1 MiB\n\nPiperOrigin-RevId: 280044012\n\n822172613e1d93bede3beaf78b123c42a5876e2b\nReplace local_repository with http_archive in WORKSPACE\n\nPiperOrigin-RevId: 280039052\n\n6a8c7914d1b79bd832b5157a09a9332e8cbd16d4\nAdded notification_supported_by_agent to indicate whether the agent is sending notifications to Google or not.\n\nPiperOrigin-RevId: 279991530\n\n675de3dc9ab98cc1cf54216ad58c933ede54e915\nAdd an endpoint_urls field to the instance admin proto and adds a field_mask field to the GetInstanceRequest.\n\nPiperOrigin-RevId: 279982263\n\n" + "sha": "052b274138fce2be80f97b6dcb83ab343c7c8812", + "internalRef": "293693115", + "log": "052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n" } }, { diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 0d477f5f179d..d5a5f9e1343d 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -36,7 +36,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=100) -s.move(templated_files) +templated_files = common.py_library(unit_cov_level=97, cov_level=99, system_test_dependencies=["test_utils"]) +s.move(templated_files, excludes="noxfile.py") s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 65c802e256781fdff3188da836909096f29d2fb7 Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Fri, 7 Feb 2020 00:22:45 +0000 Subject: 
[PATCH 254/611] fix: add back doctest doc dep --- packages/google-cloud-datastore/docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 19a56853f0b1..bb3a9142b77c 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -33,6 +33,7 @@ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", + "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.coverage", "sphinx.ext.napoleon", From 34dc22426c642727a420f46e1ef3fcb1548fdae3 Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Fri, 7 Feb 2020 00:37:04 +0000 Subject: [PATCH 255/611] fix: synth updates for overwriting conf.py --- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/synth.metadata | 7 +++---- packages/google-cloud-datastore/synth.py | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index b090f01fc853..d8dcdf9378b6 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -112,8 +112,8 @@ def system(session): # virtualenv's dist-packages. session.install("mock", "pytest") - session.install("-e", ".") session.install("-e", "test_utils") + session.install("-e", ".") # Run py.test against the system tests. if system_test_exists: diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 4975d919a966..8213e8f2e626 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2020-02-06T23:50:52.082564Z", + "updateTime": "2020-02-07T00:36:36.462894Z", "sources": [ { "generator": { @@ -12,9 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "052b274138fce2be80f97b6dcb83ab343c7c8812", - "internalRef": "293693115", - "log": "052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n" + "sha": "68477017c4173c98addac0373950c6aa9d7b375f", + "internalRef": "293703548" } }, { diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index d5a5f9e1343d..0ccb57854645 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -37,6 +37,6 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=99, system_test_dependencies=["test_utils"]) -s.move(templated_files, excludes="noxfile.py") +s.move(templated_files, excludes=["docs/conf.py"]) s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 686cfc0d452c33b0440e0e96d7dc618e9caa14e9 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Fri, 28 Feb 2020 03:30:43 +0530 Subject: [PATCH 256/611] feat(datastore): add return query object in add filter method (#12) --- .../google/cloud/datastore/query.py | 12 ++++++++---- .../google-cloud-datastore/tests/unit/test_query.py | 8 ++++++++ 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index f7979d12be70..4b3daa66dfb0 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -220,8 +220,8 @@ def add_filter(self, property_name, operator, value): >>> from google.cloud import datastore >>> client = datastore.Client() >>> query = client.query(kind='Person') - >>> query.add_filter('name', '=', 'James') - >>> query.add_filter('age', '>', 50) + >>> query = query.add_filter('name', '=', 'James') + >>> query = query.add_filter('age', '>', 50) :type property_name: str :param property_name: A property name. @@ -235,6 +235,9 @@ def add_filter(self, property_name, operator, value): :class:`google.cloud.datastore.key.Key` :param value: The value to filter on. + :rtype: :class:`~google.cloud.datastore.query.Query` + :returns: A query object. + :raises: :class:`ValueError` if ``operator`` is not one of the specified values, or if a filter names ``'__key__'`` but passes an invalid value (a key is required). @@ -248,6 +251,7 @@ def add_filter(self, property_name, operator, value): raise ValueError('Invalid key: "%s"' % value) self._filters.append((property_name, operator, value)) + return self @property def projection(self): @@ -348,8 +352,8 @@ def fetch( >>> from google.cloud import datastore >>> client = datastore.Client() >>> query = client.query(kind='Person') - >>> query.add_filter('name', '=', 'Sally') - >>> list(query.fetch()) + >>> result = query.add_filter('name', '=', 'Sally').fetch() + >>> list(result) [<Entity object>, <Entity object>, ...] >>> list(query.fetch(1)) [<Entity object>] diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 01a005f4eb78..ed6cbc9d9405 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -214,6 +214,14 @@ def test_add_filter___key__valid_key(self): query.add_filter("__key__", "=", key) self.assertEqual(query.filters, [("__key__", "=", key)]) + def test_add_filter_return_query_obj(self): + from google.cloud.datastore.query import Query + + query = self._make_one(self._make_client()) + query_obj = query.add_filter("firstname", "=", u"John") + self.assertIsInstance(query_obj, Query) + self.assertEqual(query_obj.filters, [("firstname", "=", u"John")]) + def test_filter___key__not_equal_operator(self): from google.cloud.datastore.key import Key From d4a795794d929f54a62aaf8bebd911f91f273b5b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 27 Feb 2020 22:08:07 +0000 Subject: [PATCH 257/611] chore: release 1.11.0 (#17) :robot: I have created a release \*beep\* \*boop\* --- ## [1.11.0](https://www.github.com/googleapis/python-datastore/compare/v1.10.0...v1.11.0) (2020-02-27) ### Features * **datastore:** add return query object in add filter method ([#12](https://www.github.com/googleapis/python-datastore/issues/12)) ([6a9efab](https://www.github.com/googleapis/python-datastore/commit/6a9efabe1560d5137986df70f1b4f79731deac02)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please).
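As an illustration of the released feature (a minimal sketch, assuming a configured ``datastore.Client`` and an existing ``Person`` kind; the names are hypothetical), ``add_filter`` now returns the query itself, so filters can be chained before fetching: >>> from google.cloud import datastore >>> client = datastore.Client() >>> query = client.query(kind='Person') >>> query = query.add_filter('name', '=', 'James').add_filter('age', '>', 50) >>> people = list(query.fetch())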
--- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 2dc7b6c7cc86..da207f8a7fe8 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [1.11.0](https://www.github.com/googleapis/python-datastore/compare/v1.10.0...v1.11.0) (2020-02-27) + + +### Features + +* **datastore:** add return query object in add filter method ([#12](https://www.github.com/googleapis/python-datastore/issues/12)) ([6a9efab](https://www.github.com/googleapis/python-datastore/commit/6a9efabe1560d5137986df70f1b4f79731deac02)) + ## 1.10.0 10-10-2019 12:20 PDT diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index ffe2fd19c539..fca25a5abbec 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.10.0" +version = "1.11.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From dd3bc2b80f00bb515e22570652614360a7704043 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 6 Apr 2020 17:54:52 -0700 Subject: [PATCH 258/611] fix: Address queries not fully satisfying requested offset (#18) --- .../google/cloud/datastore/query.py | 16 +++++- .../tests/system/test_system.py | 57 +++++++++++++++++++ .../tests/system/utils/populate_datastore.py | 55 ++++++++++++++++++ 3 files changed, 127 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 4b3daa66dfb0..78a153cb3247 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -498,7 +498,6 @@ def _process_query_results(self, response_pb): :raises ValueError: If ``more_results`` is an unexpected value. """ self._skipped_results = response_pb.batch.skipped_results - if response_pb.batch.more_results == _NO_MORE_RESULTS: self.next_page_token = None else: @@ -540,6 +539,21 @@ def _next_page(self): response_pb = self.client._datastore_api.run_query( self._query.project, partition_id, read_options, query=query_pb ) + + while ( + response_pb.batch.more_results == _NOT_FINISHED + and response_pb.batch.skipped_results < query_pb.offset + ): + # We haven't finished processing. A likely reason is we haven't + # skipped all of the results yet. Don't return any results. + # Instead, rerun query, adjusting offsets. Datastore doesn't process + # more than 1000 skipped results in a query. 
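+ # Illustrative numbers (an assumption, not taken from this change): with + # offset=2000 the first response may report skipped_results=1000 while + # more_results is still _NOT_FINISHED; the statements below then rerun + # the query from skipped_cursor with the offset reduced to 1000, and + # repeat until the requested offset has been fully skipped.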
+ query_pb.start_cursor = response_pb.batch.skipped_cursor + query_pb.offset -= response_pb.batch.skipped_results + response_pb = self.client._datastore_api.run_query( + self._query.project, partition_id, read_options, query=query_pb + ) + entity_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, entity_pbs, self.item_to_value) diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 482b0b80ba7d..ef0de3a2c9b0 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -14,6 +14,7 @@ import datetime import os +import string import unittest import requests @@ -465,6 +466,62 @@ def test_query_distinct_on(self): self.assertEqual(entities[1]["name"], "Arya") + +class TestDatastoreQueryOffsets(TestDatastore): + TOTAL_OBJECTS = 2500 + NAMESPACE = "LargeCharacterEntity" + KIND = "LargeCharacter" + + @classmethod + def setUpClass(cls): + cls.CLIENT = clone_client(Config.CLIENT) + # Remove the namespace from the cloned client, since these + # query tests rely on the entities to be already stored + # cls.CLIENT.namespace = cls.NAMESPACE + cls.CLIENT.namespace = None + + # Populate the datastore if necessary. + populate_datastore.add_large_character_entities(client=cls.CLIENT) + + @classmethod + def tearDownClass(cls): + # In the emulator, destroy the query entities. + if os.getenv(GCD_DATASET) is not None: + # Use the client for this test instead of the global. + clear_datastore.remove_all_entities(client=cls.CLIENT) + + def _base_query(self): + # Use the client for this test instead of the global. + return self.CLIENT.query(kind=self.KIND, namespace=self.NAMESPACE) + + def _verify(self, limit, offset, expected): + # Query used for all tests + page_query = self._base_query() + page_query.add_filter("family", "=", "Stark") + page_query.add_filter("alive", "=", False) + + iterator = page_query.fetch(limit=limit, offset=offset) + entities = [e for e in iterator] + self.assertEqual(len(entities), expected) + + def test_query_in_bounds_offsets(self): + # Verify that with no offset there is the correct number of results + self._verify(limit=None, offset=None, expected=self.TOTAL_OBJECTS) + + # Verify that with no limit there are results (offset provided) + self._verify(limit=None, offset=900, expected=self.TOTAL_OBJECTS - 900) + + # Offset within bounds, limit applied; verify 200 items found + self._verify(limit=200, offset=1100, expected=200) + + def test_query_partially_out_of_bounds_offsets(self): + # Offset within range; expect 50 despite the larger limit + self._verify(limit=100, offset=self.TOTAL_OBJECTS - 50, expected=50) + + def test_query_out_of_bounds_offsets(self): + # Offset beyond the total number of items; verify no items found + self._verify(limit=200, offset=self.TOTAL_OBJECTS + 1000, expected=0) + + class TestDatastoreTransaction(TestDatastore): def test_transaction_via_with_statement(self): entity = datastore.Entity(key=Config.CLIENT.key("Company", "Google")) diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 2c266a8ac4bc..e2baa5b33f34 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -18,6 +18,7 @@ from __future__ import print_function import os +import string import sys import time import
uuid @@ -62,6 +63,60 @@ def print_func(message): print(message) +def add_large_character_entities(client=None): + TOTAL_OBJECTS = 2500 + NAMESPACE = "LargeCharacterEntity" + KIND = "LargeCharacter" + MAX_STRING = (string.ascii_lowercase * 58)[:1500] + + client.namespace = NAMESPACE + + # Query used for all tests + page_query = client.query(kind=KIND, namespace=NAMESPACE) + + def put_objects(count): + remaining = count + current = 0 + + # Can only do 500 operations in a transaction with an overall + # size limit. + ENTITIES_TO_BATCH = 25 + while current < count: + start = current + end = min(current + ENTITIES_TO_BATCH, count) + with client.transaction() as xact: + # The name/ID for the new entity + for i in range(start, end): + name = "character{0:05d}".format(i) + # The Cloud Datastore key for the new entity + task_key = client.key(KIND, name) + + # Prepares the new entity + task = datastore.Entity(key=task_key) + task["name"] = "{0:05d}".format(i) + task["family"] = "Stark" + task["alive"] = False + + for i in string.ascii_lowercase: + task["space-{}".format(i)] = MAX_STRING + + # Saves the entity + xact.put(task) + current += ENTITIES_TO_BATCH + + # Ensure we have TOTAL_OBJECTS entities for tests. If not, clean up the + # kind and add new entities equal to TOTAL_OBJECTS + all_entities = [e for e in page_query.fetch()] + if len(all_entities) != TOTAL_OBJECTS: + # Clean up the kind if the count is not an exact match + while all_entities: + entities = all_entities[:500] + all_entities = all_entities[500:] + client.delete_multi([e.key for e in entities]) + # Put objects + put_objects(TOTAL_OBJECTS) + + def add_characters(client=None): if client is None: # Get a client that uses the test dataset. From 4de3290c2888462892f2b495d11c3748888766bb Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 7 Apr 2020 13:22:11 -0700 Subject: [PATCH 259/611] chore: docs build and lint fixes (#22) * chore: lint fixes * chore: pin sphinx to the 2.x versions --- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/tests/system/test_system.py | 1 - .../tests/system/utils/populate_datastore.py | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index d8dcdf9378b6..ce4aea432478 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -140,7 +140,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index ef0de3a2c9b0..577bd748809d 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -14,7 +14,6 @@ import datetime import os -import string import unittest import requests diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index e2baa5b33f34..e8e1574aa841 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -75,7 +75,6 @@ def add_large_character_entities(client=None): page_query =
client.query(kind=KIND, namespace=NAMESPACE) def put_objects(count): - remaining = count current = 0 # Can only do 500 operations in a transaction with an overall From e9fa06663c801f2776fe2ec2886ea9e0ea8bbba9 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 8 Apr 2020 02:33:09 +0530 Subject: [PATCH 260/611] feat(datastore): add missing method for system test with emulator (#19) Co-authored-by: Christopher Wilcox --- .../tests/system/utils/clear_datastore.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index 03f5ffddbe10..3438ff895e26 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -80,6 +80,13 @@ def remove_kind(kind, client): delete_chunks(client, results) +def remove_all_entities(client): + query = client.query() + results = list(query.fetch()) + keys = [entity.key for entity in results] + client.delete_multi(keys) + + def main(): client = datastore.Client() kinds = sys.argv[1:] From 8e8ee2be1c2a5fbeaf18981e0720913adb29d381 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Apr 2020 14:27:08 -0700 Subject: [PATCH 261/611] chore: release 1.12.0 (#23) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py [ci skip] Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index da207f8a7fe8..83593e298d1a 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [1.12.0](https://www.github.com/googleapis/python-datastore/compare/v1.11.0...v1.12.0) (2020-04-07) + + +### Features + +* **datastore:** add missing method for system test with emulator ([#19](https://www.github.com/googleapis/python-datastore/issues/19)) ([bf8b897](https://www.github.com/googleapis/python-datastore/commit/bf8b897dc86e28e4ad79e05f24383c1387eddbf6)) + + +### Bug Fixes + +* Address queries not fully satisfying requested offset ([#18](https://www.github.com/googleapis/python-datastore/issues/18)) ([e7b5fc9](https://www.github.com/googleapis/python-datastore/commit/e7b5fc99e91078e94d1eaab64e1ea2158220ae98)) + ## [1.11.0](https://www.github.com/googleapis/python-datastore/compare/v1.10.0...v1.11.0) (2020-02-27) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index fca25a5abbec..a20a1dd63005 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.11.0" +version = "1.12.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 06dcb5dcf3e29a94d5c38eba8e30fe3ce404df37 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 28 Apr 2020 18:12:44 -0700 Subject: [PATCH 262/611] chore: update templates, remove test_utils 
dir (via synth) (#16) --- packages/google-cloud-datastore/.coveragerc | 16 ++ packages/google-cloud-datastore/.flake8 | 16 ++ .../.github/ISSUE_TEMPLATE/bug_report.md | 3 +- .../google-cloud-datastore/CONTRIBUTING.rst | 15 +- packages/google-cloud-datastore/MANIFEST.in | 16 ++ packages/google-cloud-datastore/noxfile.py | 4 +- packages/google-cloud-datastore/setup.cfg | 16 ++ .../google-cloud-datastore/synth.metadata | 24 +- packages/google-cloud-datastore/synth.py | 2 +- .../test_utils/credentials.json.enc | 49 ---- .../scripts/circleci/get_tagged_package.py | 64 ----- .../scripts/circleci/twine_upload.sh | 36 --- .../test_utils/scripts/get_target_packages.py | 268 ------------------ .../scripts/get_target_packages_kokoro.py | 98 ------- .../test_utils/scripts/run_emulator.py | 199 ------------- .../test_utils/scripts/update_docs.sh | 93 ------ .../test_utils/setup.py | 64 ----- .../test_utils/test_utils/__init__.py | 0 .../test_utils/test_utils/imports.py | 38 --- .../test_utils/test_utils/retry.py | 207 -------------- .../test_utils/test_utils/system.py | 81 ------ .../test_utils/test_utils/vpcsc_config.py | 118 -------- 22 files changed, 85 insertions(+), 1342 deletions(-) delete mode 100644 packages/google-cloud-datastore/test_utils/credentials.json.enc delete mode 100644 packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py delete mode 100755 packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh delete mode 100644 packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py delete mode 100644 packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py delete mode 100644 packages/google-cloud-datastore/test_utils/scripts/run_emulator.py delete mode 100755 packages/google-cloud-datastore/test_utils/scripts/update_docs.sh delete mode 100644 packages/google-cloud-datastore/test_utils/setup.py delete mode 100644 packages/google-cloud-datastore/test_utils/test_utils/__init__.py delete mode 100644 packages/google-cloud-datastore/test_utils/test_utils/imports.py delete mode 100644 packages/google-cloud-datastore/test_utils/test_utils/retry.py delete mode 100644 packages/google-cloud-datastore/test_utils/test_utils/system.py delete mode 100644 packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index b178b094aa1d..dd39c8546c41 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! 
[run] branch = True diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 0268ecc9c55c..20fe9bda2ee4 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md index e00382ac1ab0..02714dfe78e5 100644 --- a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md +++ b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md @@ -11,8 +11,7 @@ Thanks for stopping by to let us know something could be better! Please run down the following list and make sure you've tried the usual "quick fixes": - Search the issues already opened: https://github.com/googleapis/python-datastore/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python If you are still having issues, please be sure to include as much information as possible: diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index b6326a58b484..653ac4341f8f 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, and 3.7 on both UNIX and Windows. + 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -214,26 +214,18 @@ We support: - `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ +- `Python 3.8`_ .. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-datastore/blob/master/noxfile.py -We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ -and lack of continuous integration `support`_. - -.. _Python 2.5: https://docs.python.org/2.5/ -.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ -.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ - -We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no -longer supported by the core development team. - Python 2.7 support is deprecated. 
All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version @@ -247,7 +239,6 @@ We also explicitly decided to support Python 3 beginning with version .. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django .. _projects: http://flask.pocoo.org/docs/0.10/python3/ .. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ -.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 ********** Versioning diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index cd011be27a0e..68855abc3f02 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index ce4aea432478..1e43b20eb9e2 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -110,9 +110,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest") - - session.install("-e", "test_utils") + session.install("mock", "pytest", "google-cloud-testutils") session.install("-e", ".") # Run py.test against the system tests. diff --git a/packages/google-cloud-datastore/setup.cfg b/packages/google-cloud-datastore/setup.cfg index 3bd555500e37..c3a2b39f6528 100644 --- a/packages/google-cloud-datastore/setup.cfg +++ b/packages/google-cloud-datastore/setup.cfg @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! 
[bdist_wheel] universal = 1 diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 8213e8f2e626..58b5cedc04b6 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,26 +1,32 @@ { - "updateTime": "2020-02-07T00:36:36.462894Z", "sources": [ { "generator": { "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" + "version": "2.0.0", + "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" + } + }, + { + "git": { + "name": ".", + "remote": "git@github.com:googleapis/python-datastore", + "sha": "f9c0937f24f3a9874db6a8710e260c6ce2907069" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "68477017c4173c98addac0373950c6aa9d7b375f", - "internalRef": "293703548" + "sha": "756b174de4a122461993c1c583345533d819936d", + "internalRef": "308824110" } }, { - "template": { - "name": "python_split_library", - "origin": "synthtool.gcp", - "version": "2020.2.4" + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "01b6f23d24b27878b48667ce597876d66b59780e" } } ], diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 0ccb57854645..ff84827c949a 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -36,7 +36,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=99, system_test_dependencies=["test_utils"]) +templated_files = common.py_library(unit_cov_level=97, cov_level=99) s.move(templated_files, excludes=["docs/conf.py"]) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-datastore/test_utils/credentials.json.enc b/packages/google-cloud-datastore/test_utils/credentials.json.enc deleted file mode 100644 index f073c7e4f774..000000000000 --- a/packages/google-cloud-datastore/test_utils/credentials.json.enc +++ /dev/null @@ -1,49 +0,0 @@ -U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA -UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU -aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj -HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV -V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus -J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8 -Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He -/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv -ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT -6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq -NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8 -j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF -41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM -IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g -x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/ -vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy -ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At -CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD 
-j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK -jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z -cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO -LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso -Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d -XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/ -MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP -+dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4 -kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU -5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr -E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29 -D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT -tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX -XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6 -J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB -jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM -td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg -twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC -mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU -aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6 -uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK -n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ -bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX -ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H -NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w -1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE -8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL -qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv -tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4 -iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l -bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD diff --git a/packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py b/packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py deleted file mode 100644 index c148b9dc2370..000000000000 --- a/packages/google-cloud-datastore/test_utils/scripts/circleci/get_tagged_package.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helper to determine package from tag. -Get the current package directory corresponding to the Circle Tag. -""" - -from __future__ import print_function - -import os -import re -import sys - - -TAG_RE = re.compile(r""" - ^ - (?P - (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed) - ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) - $ -""", re.VERBOSE) -TAG_ENV = 'CIRCLE_TAG' -ERROR_MSG = '%s env. var. not set' % (TAG_ENV,) -BAD_TAG_MSG = 'Invalid tag name: %s. 
Expected pkg-name-x.y.z' -CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__) -ROOT_DIR = os.path.realpath( - os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..')) - - -def main(): - """Get the current package directory. - Prints the package directory out so callers can consume it. - """ - if TAG_ENV not in os.environ: - print(ERROR_MSG, file=sys.stderr) - sys.exit(1) - - tag_name = os.environ[TAG_ENV] - match = TAG_RE.match(tag_name) - if match is None: - print(BAD_TAG_MSG % (tag_name,), file=sys.stderr) - sys.exit(1) - - pkg_name = match.group('pkg') - if pkg_name is None: - print(ROOT_DIR) - else: - pkg_dir = pkg_name.rstrip('-').replace('-', '_') - print(os.path.join(ROOT_DIR, pkg_dir)) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh b/packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh deleted file mode 100755 index 23a4738e90b9..000000000000 --- a/packages/google-cloud-datastore/test_utils/scripts/circleci/twine_upload.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ev - -# If this is not a CircleCI tag, no-op. -if [[ -z "$CIRCLE_TAG" ]]; then - echo "This is not a release tag. Doing nothing." - exit 0 -fi - -# H/T: http://stackoverflow.com/a/246128/1068170 -SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py" -# Determine the package directory being deploying on this tag. -PKG_DIR="$(python ${SCRIPT})" - -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - -# Move into the package, build the distribution and upload. -cd ${PKG_DIR} -python3 setup.py sdist bdist_wheel -twine upload dist/* diff --git a/packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py b/packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py deleted file mode 100644 index 1d51830cc23a..000000000000 --- a/packages/google-cloud-datastore/test_utils/scripts/get_target_packages.py +++ /dev/null @@ -1,268 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Print a list of packages which require testing.""" - -import os -import re -import subprocess -import warnings - - -CURRENT_DIR = os.path.realpath(os.path.dirname(__file__)) -BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..')) -GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python') -CI = os.environ.get('CI', '') -CI_BRANCH = os.environ.get('CIRCLE_BRANCH') -CI_PR = os.environ.get('CIRCLE_PR_NUMBER') -CIRCLE_TAG = os.environ.get('CIRCLE_TAG') -head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD'] -).strip().decode('ascii').split() -rev_parse = subprocess.check_output( - ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] -).strip().decode('ascii') -MAJOR_DIV = '#' * 78 -MINOR_DIV = '#' + '-' * 77 - -# NOTE: This reg-ex is copied from ``get_tagged_packages``. -TAG_RE = re.compile(r""" - ^ - (?P - (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) - ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) - $ -""", re.VERBOSE) - -# This is the current set of dependencies by package. -# As of this writing, the only "real" dependency is that of error_reporting -# (on logging), the rest are just system test dependencies. -PKG_DEPENDENCIES = { - 'logging': {'pubsub'}, -} - - -def get_baseline(): - """Return the baseline commit. - - On a pull request, or on a branch, return the common parent revision - with the master branch. - - Locally, return a value pulled from environment variables, or None if - the environment variables are not set. - - On a push to master, return None. This will effectively cause everything - to be considered to be affected. - """ - - # If this is a pull request or branch, return the tip for master. - # We will test only packages which have changed since that point. - ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR]) - - if ci_non_master: - - repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO) - subprocess.run(['git', 'remote', 'add', 'baseline', repo_url], - stderr=subprocess.DEVNULL) - subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL) - - if CI_PR is None and CI_BRANCH is not None: - output = subprocess.check_output([ - 'git', 'merge-base', '--fork-point', - 'baseline/master', CI_BRANCH]) - return output.strip().decode('ascii') - - return 'baseline/master' - - # If environment variables are set identifying what the master tip is, - # use that. - if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''): - remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE'] - branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master') - return '%s/%s' % (remote, branch) - - # If we are not in CI and we got this far, issue a warning. - if not CI: - warnings.warn('No baseline could be determined; this means tests ' - 'will run for every package. If this is local ' - 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE ' - 'environment variable.') - - # That is all we can do; return None. - return None - - -def get_changed_files(): - """Return a list of files that have been changed since the baseline. - - If there is no base, return None. - """ - # Get the baseline, and fail quickly if there is no baseline. - baseline = get_baseline() - print('# Baseline commit: {}'.format(baseline)) - if not baseline: - return None - - # Return a list of altered files. 
- try: - return subprocess.check_output([ - 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline), - ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') - except subprocess.CalledProcessError: - warnings.warn('Unable to perform git diff; falling back to assuming ' - 'all packages have changed.') - return None - - -def reverse_map(dict_of_sets): - """Reverse a map of one-to-many. - - So the map:: - - { - 'A': {'B', 'C'}, - 'B': {'C'}, - } - - becomes - - { - 'B': {'A'}, - 'C': {'A', 'B'}, - } - - Args: - dict_of_sets (dict[set]): A dictionary of sets, mapping - one value to many. - - Returns: - dict[set]: The reversed map. - """ - result = {} - for key, values in dict_of_sets.items(): - for value in values: - result.setdefault(value, set()).add(key) - - return result - -def get_changed_packages(file_list): - """Return a list of changed packages based on the provided file list. - - If the file list is None, then all packages should be considered to be - altered. - """ - # Determine a complete list of packages. - all_packages = set() - for file_ in os.listdir(BASE_DIR): - abs_file = os.path.realpath(os.path.join(BASE_DIR, file_)) - nox_file = os.path.join(abs_file, 'nox.py') - if os.path.isdir(abs_file) and os.path.isfile(nox_file): - all_packages.add(file_) - - # If ther is no file list, send down the full package set. - if file_list is None: - return all_packages - - # Create a set based on the list of changed files. - answer = set() - reverse_deps = reverse_map(PKG_DEPENDENCIES) - for file_ in file_list: - # Ignore root directory changes (setup.py, .gitignore, etc.). - if os.path.sep not in file_: - continue - - # Ignore changes that are not in a package (usually this will be docs). - package = file_.split(os.path.sep, 1)[0] - if package not in all_packages: - continue - - # If there is a change in core, short-circuit now and return - # everything. - if package in ('core',): - return all_packages - - # Add the package, as well as any dependencies this package has. - # NOTE: For now, dependencies only go down one level. - answer.add(package) - answer = answer.union(reverse_deps.get(package, set())) - - # We got this far without being short-circuited; return the final answer. - return answer - - -def get_tagged_package(): - """Return the package corresponding to the current tag. - - If there is not tag, will return :data:`None`. - """ - if CIRCLE_TAG is None: - return - - match = TAG_RE.match(CIRCLE_TAG) - if match is None: - return - - pkg_name = match.group('pkg') - if pkg_name == '': - # NOTE: This corresponds to the "umbrella" tag. - return - - return pkg_name.rstrip('-').replace('-', '_') - - -def get_target_packages(): - """Return a list of target packages to be run in the current build. - - If in a tag build, will run only the package(s) that are tagged, otherwise - will run the packages that have file changes in them (or packages that - depend on those). 
- """ - tagged_package = get_tagged_package() - if tagged_package is None: - file_list = get_changed_files() - print(MAJOR_DIV) - print('# Changed files:') - print(MINOR_DIV) - for file_ in file_list or (): - print('# {}'.format(file_)) - for package in sorted(get_changed_packages(file_list)): - yield package - else: - yield tagged_package - - -def main(): - print(MAJOR_DIV) - print('# Environment') - print(MINOR_DIV) - print('# CircleCI: {}'.format(CI)) - print('# CircleCI branch: {}'.format(CI_BRANCH)) - print('# CircleCI pr: {}'.format(CI_PR)) - print('# CircleCI tag: {}'.format(CIRCLE_TAG)) - print('# HEAD ref: {}'.format(head_hash)) - print('# {}'.format(head_name)) - print('# Git branch: {}'.format(rev_parse)) - print(MAJOR_DIV) - - packages = list(get_target_packages()) - - print(MAJOR_DIV) - print('# Target packages:') - print(MINOR_DIV) - for package in packages: - print(package) - print(MAJOR_DIV) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py b/packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py deleted file mode 100644 index 27d3a0c940ea..000000000000 --- a/packages/google-cloud-datastore/test_utils/scripts/get_target_packages_kokoro.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Print a list of packages which require testing.""" - -import pathlib -import subprocess - -import ci_diff_helper -import requests - - -def print_environment(environment): - print("-> CI environment:") - print('Branch', environment.branch) - print('PR', environment.pr) - print('In PR', environment.in_pr) - print('Repo URL', environment.repo_url) - if environment.in_pr: - print('PR Base', environment.base) - - -def get_base(environment): - if environment.in_pr: - return environment.base - else: - # If we're not in a PR, just calculate the changes between this commit - # and its parent. 
- return 'HEAD~1' - - -def get_changed_files_from_base(base): - return subprocess.check_output([ - 'git', 'diff', '--name-only', f'{base}..HEAD', - ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') - - -_URL_TEMPLATE = ( - 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/' - '{}/files' -) - - -def get_changed_files_from_pr(pr): - url = _URL_TEMPLATE.format(pr) - while url is not None: - response = requests.get(url) - for info in response.json(): - yield info['filename'] - url = response.links.get('next', {}).get('url') - - -def determine_changed_packages(changed_files): - packages = [ - path.parent for path in pathlib.Path('.').glob('*/noxfile.py') - ] - - changed_packages = set() - for file in changed_files: - file = pathlib.Path(file) - for package in packages: - if package in file.parents: - changed_packages.add(package) - - return changed_packages - - -def main(): - environment = ci_diff_helper.get_config() - print_environment(environment) - base = get_base(environment) - - if environment.in_pr: - changed_files = list(get_changed_files_from_pr(environment.pr)) - else: - changed_files = get_changed_files_from_base(base) - - packages = determine_changed_packages(changed_files) - - print(f"Comparing against {base}.") - print("-> Changed packages:") - - for package in packages: - print(package) - - -main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/run_emulator.py b/packages/google-cloud-datastore/test_utils/scripts/run_emulator.py deleted file mode 100644 index 287b08640691..000000000000 --- a/packages/google-cloud-datastore/test_utils/scripts/run_emulator.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Run system tests locally with the emulator. - -First makes system calls to spawn the emulator and get the local environment -variable needed for it. Then calls the system tests. -""" - - -import argparse -import os -import subprocess - -import psutil - -from google.cloud.environment_vars import BIGTABLE_EMULATOR -from google.cloud.environment_vars import GCD_DATASET -from google.cloud.environment_vars import GCD_HOST -from google.cloud.environment_vars import PUBSUB_EMULATOR -from run_system_test import run_module_tests - - -BIGTABLE = 'bigtable' -DATASTORE = 'datastore' -PUBSUB = 'pubsub' -PACKAGE_INFO = { - BIGTABLE: (BIGTABLE_EMULATOR,), - DATASTORE: (GCD_DATASET, GCD_HOST), - PUBSUB: (PUBSUB_EMULATOR,), -} -EXTRA = { - DATASTORE: ('--no-legacy',), -} -_DS_READY_LINE = '[datastore] Dev App Server is now running.\n' -_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on ' -_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on ' - - -def get_parser(): - """Get simple ``argparse`` parser to determine package. - - :rtype: :class:`argparse.ArgumentParser` - :returns: The parser for this script. 
- """ - parser = argparse.ArgumentParser( - description='Run google-cloud system tests against local emulator.') - parser.add_argument('--package', dest='package', - choices=sorted(PACKAGE_INFO.keys()), - default=DATASTORE, help='Package to be tested.') - return parser - - -def get_start_command(package): - """Get command line arguments for starting emulator. - - :type package: str - :param package: The package to start an emulator for. - - :rtype: tuple - :returns: The arguments to be used, in a tuple. - """ - result = ('gcloud', 'beta', 'emulators', package, 'start') - extra = EXTRA.get(package, ()) - return result + extra - - -def get_env_init_command(package): - """Get command line arguments for getting emulator env. info. - - :type package: str - :param package: The package to get environment info for. - - :rtype: tuple - :returns: The arguments to be used, in a tuple. - """ - result = ('gcloud', 'beta', 'emulators', package, 'env-init') - extra = EXTRA.get(package, ()) - return result + extra - - -def datastore_wait_ready(popen): - """Wait until the datastore emulator is ready to use. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - """ - emulator_ready = False - while not emulator_ready: - emulator_ready = popen.stderr.readline() == _DS_READY_LINE - - -def wait_ready_prefix(popen, prefix): - """Wait until the a process encounters a line with matching prefix. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - - :type prefix: str - :param prefix: The prefix to match - """ - emulator_ready = False - while not emulator_ready: - emulator_ready = popen.stderr.readline().startswith(prefix) - - -def wait_ready(package, popen): - """Wait until the emulator is ready to use. - - :type package: str - :param package: The package to check if ready. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - - :raises: :class:`KeyError` if the ``package`` is not among - ``datastore``, ``pubsub`` or ``bigtable``. - """ - if package == DATASTORE: - datastore_wait_ready(popen) - elif package == PUBSUB: - wait_ready_prefix(popen, _PS_READY_LINE_PREFIX) - elif package == BIGTABLE: - wait_ready_prefix(popen, _BT_READY_LINE_PREFIX) - else: - raise KeyError('Package not supported', package) - - -def cleanup(pid): - """Cleanup a process (including all of its children). - - :type pid: int - :param pid: Process ID. - """ - proc = psutil.Process(pid) - for child_proc in proc.children(recursive=True): - try: - child_proc.kill() - child_proc.terminate() - except psutil.NoSuchProcess: - pass - proc.terminate() - proc.kill() - - -def run_tests_in_emulator(package): - """Spawn an emulator instance and run the system tests. - - :type package: str - :param package: The package to run system tests against. - """ - # Make sure this package has environment vars to replace. - env_vars = PACKAGE_INFO[package] - - start_command = get_start_command(package) - # Ignore stdin and stdout, don't pollute the user's output with them. 
- proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - try: - wait_ready(package, proc_start) - env_init_command = get_env_init_command(package) - proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - env_status = proc_env.wait() - if env_status != 0: - raise RuntimeError(env_status, proc_env.stderr.read()) - env_lines = proc_env.stdout.read().strip().split('\n') - # Set environment variables before running the system tests. - for env_var in env_vars: - line_prefix = 'export ' + env_var + '=' - value, = [line.split(line_prefix, 1)[1] for line in env_lines - if line.startswith(line_prefix)] - os.environ[env_var] = value - run_module_tests(package, - ignore_requirements=True) - finally: - cleanup(proc_start.pid) - - -def main(): - """Main method to run this script.""" - parser = get_parser() - args = parser.parse_args() - run_tests_in_emulator(args.package) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-datastore/test_utils/scripts/update_docs.sh b/packages/google-cloud-datastore/test_utils/scripts/update_docs.sh deleted file mode 100755 index 8cbab9f0dad0..000000000000 --- a/packages/google-cloud-datastore/test_utils/scripts/update_docs.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash - -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ev - -GH_OWNER='GoogleCloudPlatform' -GH_PROJECT_NAME='google-cloud-python' - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -# Function to build the docs. -function build_docs { - rm -rf docs/_build/ - rm -f docs/bigquery/generated/*.rst - # -W -> warnings as errors - # -T -> show full traceback on exception - # -N -> no color - sphinx-build \ - -W -T -N \ - -b html \ - -d docs/_build/doctrees \ - docs/ \ - docs/_build/html/ - return $? -} - -# Only update docs if we are on CircleCI. -if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then - echo "Building new docs on a merged commit." -elif [[ "$1" == "kokoro" ]]; then - echo "Building and publishing docs on Kokoro." -elif [[ -n "${CIRCLE_TAG}" ]]; then - echo "Building new docs on a tag (but will not deploy)." - build_docs - exit $? -else - echo "Not on master nor a release tag." - echo "Building new docs for testing purposes, but not deploying." - build_docs - exit $? -fi - -# Adding GitHub pages branch. `git submodule add` checks it -# out at HEAD. -GH_PAGES_DIR='ghpages' -git submodule add -q -b gh-pages \ - "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR} - -# Determine if we are building a new tag or are building docs -# for master. Then build new docs in docs/_build from master. -if [[ -n "${CIRCLE_TAG}" ]]; then - # Sphinx will use the package version by default. - build_docs -else - SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs -fi - -# Update gh-pages with the created docs. -cd ${GH_PAGES_DIR} -git rm -fr latest/ -cp -R ../docs/_build/html/ latest/ - -# Update the files push to gh-pages. -git add . 
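-# Log the staged changes; the --porcelain check below is what actually
-# decides whether there is anything to commit.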
-git status - -# If there are no changes, just exit cleanly. -if [[ -z "$(git status --porcelain)" ]]; then - echo "Nothing to commit. Exiting without pushing changes." - exit -fi - -# Commit to gh-pages branch to apply changes. -git config --global user.email "dpebot@google.com" -git config --global user.name "dpebot" -git commit -m "Update docs after merge to master." - -# NOTE: This may fail if two docs updates (on merges to master) -# happen in close proximity. -git push -q origin HEAD:gh-pages diff --git a/packages/google-cloud-datastore/test_utils/setup.py b/packages/google-cloud-datastore/test_utils/setup.py deleted file mode 100644 index 8e9222a7f862..000000000000 --- a/packages/google-cloud-datastore/test_utils/setup.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -from setuptools import find_packages -from setuptools import setup - - -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) - - -# NOTE: This is duplicated throughout and we should try to -# consolidate. -SETUP_BASE = { - 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', - 'scripts': [], - 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', - 'license': 'Apache 2.0', - 'platforms': 'Posix; MacOS X; Windows', - 'include_package_data': True, - 'zip_safe': False, - 'classifiers': [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Internet', - ], -} - - -REQUIREMENTS = [ - 'google-auth >= 0.4.0', - 'six', -] - -setup( - name='google-cloud-testutils', - version='0.24.0', - description='System test utilities for google-cloud-python', - packages=find_packages(), - install_requires=REQUIREMENTS, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', - **SETUP_BASE -) diff --git a/packages/google-cloud-datastore/test_utils/test_utils/__init__.py b/packages/google-cloud-datastore/test_utils/test_utils/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/test_utils/test_utils/imports.py b/packages/google-cloud-datastore/test_utils/test_utils/imports.py deleted file mode 100644 index 5991af7fc465..000000000000 --- a/packages/google-cloud-datastore/test_utils/test_utils/imports.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock -import six - - -def maybe_fail_import(predicate): - """Create and return a patcher that conditionally makes an import fail. - - Args: - predicate (Callable[[...], bool]): A callable that, if it returns `True`, - triggers an `ImportError`. It must accept the same arguments as the - built-in `__import__` function. - https://docs.python.org/3/library/functions.html#__import__ - - Returns: - A mock patcher object that can be used to enable patched import behavior. - """ - orig_import = six.moves.builtins.__import__ - - def custom_import(name, globals=None, locals=None, fromlist=(), level=0): - if predicate(name, globals, locals, fromlist, level): - raise ImportError - return orig_import(name, globals, locals, fromlist, level) - - return mock.patch.object(six.moves.builtins, "__import__", new=custom_import) diff --git a/packages/google-cloud-datastore/test_utils/test_utils/retry.py b/packages/google-cloud-datastore/test_utils/test_utils/retry.py deleted file mode 100644 index e61c001a03e1..000000000000 --- a/packages/google-cloud-datastore/test_utils/test_utils/retry.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -from functools import wraps - -import six - -MAX_TRIES = 4 -DELAY = 1 -BACKOFF = 2 - - -def _retry_all(_): - """Retry all caught exceptions.""" - return True - - -class BackoffFailed(Exception): - """Retry w/ backoffs did not complete successfully.""" - - -class RetryBase(object): - """Base for retrying calling a decorated function w/ exponential backoff. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - self.max_tries = max_tries - self.delay = delay - self.backoff = backoff - self.logger = logger.warning if logger else six.print_ - - -class RetryErrors(RetryBase): - """Decorator for retrying given exceptions in testing. - - :type exception: Exception or tuple of Exceptions - :param exception: The exception to check or may be a tuple of - exceptions to check. - - :type error_predicate: function, takes caught exception, returns bool - :param error_predicate: Predicate evaluating whether to retry after a - caught exception. 
- - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, exception, error_predicate=_retry_all, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryErrors, self).__init__(max_tries, delay, backoff, logger) - self.exception = exception - self.error_predicate = error_predicate - - def __call__(self, to_wrap): - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - try: - return to_wrap(*args, **kwargs) - except self.exception as caught_exception: - - if not self.error_predicate(caught_exception): - raise - - delay = self.delay * self.backoff**tries - msg = ("%s, Trying again in %d seconds..." % - (caught_exception, delay)) - self.logger(msg) - - time.sleep(delay) - tries += 1 - return to_wrap(*args, **kwargs) - - return wrapped_function - - -class RetryResult(RetryBase): - """Decorator for retrying based on non-error result. - - :type result_predicate: function, takes result, returns bool - :param result_predicate: Predicate evaluating whether to retry after a - result is returned. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, result_predicate, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryResult, self).__init__(max_tries, delay, backoff, logger) - self.result_predicate = result_predicate - - def __call__(self, to_wrap): - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - result = to_wrap(*args, **kwargs) - if self.result_predicate(result): - return result - - delay = self.delay * self.backoff**tries - msg = "%s. Trying again in %d seconds..." % ( - self.result_predicate.__name__, delay,) - self.logger(msg) - - time.sleep(delay) - tries += 1 - raise BackoffFailed() - - return wrapped_function - - -class RetryInstanceState(RetryBase): - """Decorator for retrying based on instance state. - - :type instance_predicate: function, takes instance, returns bool - :param instance_predicate: Predicate evaluating whether to retry after an - API-invoking method is called. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. 
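-
-    Only bound instance methods may be decorated: the wrapper reads
-    ``to_wrap.__self__`` to find the instance whose state is checked.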
- """ - def __init__(self, instance_predicate, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryInstanceState, self).__init__( - max_tries, delay, backoff, logger) - self.instance_predicate = instance_predicate - - def __call__(self, to_wrap): - instance = to_wrap.__self__ # only instance methods allowed - - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - result = to_wrap(*args, **kwargs) - if self.instance_predicate(instance): - return result - - delay = self.delay * self.backoff**tries - msg = "%s. Trying again in %d seconds..." % ( - self.instance_predicate.__name__, delay,) - self.logger(msg) - - time.sleep(delay) - tries += 1 - raise BackoffFailed() - - return wrapped_function diff --git a/packages/google-cloud-datastore/test_utils/test_utils/system.py b/packages/google-cloud-datastore/test_utils/test_utils/system.py deleted file mode 100644 index 590dc62a06e6..000000000000 --- a/packages/google-cloud-datastore/test_utils/test_utils/system.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2014 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -import os -import sys -import time - -import google.auth.credentials -from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS - - -# From shell environ. May be None. -CREDENTIALS = os.getenv(TEST_CREDENTIALS) - -ENVIRON_ERROR_MSG = """\ -To run the system tests, you need to set some environment variables. -Please check the CONTRIBUTING guide for instructions. -""" - - -class EmulatorCreds(google.auth.credentials.Credentials): - """A mock credential object. - - Used to avoid unnecessary token refreshing or reliance on the network - while an emulator is running. - """ - - def __init__(self): # pylint: disable=super-init-not-called - self.token = b'seekrit' - self.expiry = None - - @property - def valid(self): - """Would-be validity check of the credentials. - - Always is :data:`True`. - """ - return True - - def refresh(self, unused_request): # pylint: disable=unused-argument - """Off-limits implementation for abstract method.""" - raise RuntimeError('Should never be refreshed.') - - -def check_environ(): - err_msg = None - if CREDENTIALS is None: - err_msg = '\nMissing variables: ' + TEST_CREDENTIALS - elif not os.path.isfile(CREDENTIALS): - err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS, - CREDENTIALS) - - if err_msg is not None: - msg = ENVIRON_ERROR_MSG + err_msg - print(msg, file=sys.stderr) - sys.exit(1) - - -def unique_resource_id(delimiter='_'): - """A unique identifier for a resource. - - Intended to help locate resources created in particular - testing environments and at particular times. 
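-
-    For example, ``_1475583745678`` (a millisecond timestamp) when
-    ``CIRCLE_BUILD_NUM`` is unset, or ``_8675_1475583745`` when it is.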
- """ - build_id = os.getenv('CIRCLE_BUILD_NUM', '') - if build_id == '': - return '%s%d' % (delimiter, 1000 * time.time()) - else: - return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time()) diff --git a/packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py b/packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py deleted file mode 100644 index 36b15d6be991..000000000000 --- a/packages/google-cloud-datastore/test_utils/test_utils/vpcsc_config.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import pytest - - -INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" -PROJECT_INSIDE_ENVVAR = "PROJECT_ID" -PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" -BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" - - -class VPCSCTestConfig(object): - """System test utility for VPCSC detection. - - See: https://cloud.google.com/vpc-service-controls/docs/ - """ - - @property - def inside_vpcsc(self): - """Test whether the test environment is configured to run inside VPCSC. - - Returns: - bool: - true if the environment is configured to run inside VPCSC, - else false. - """ - return INSIDE_VPCSC_ENVVAR in os.environ - - @property - def project_inside(self): - """Project ID for testing outside access. - - Returns: - str: project ID used for testing outside access; None if undefined. - """ - return os.environ.get(PROJECT_INSIDE_ENVVAR, None) - - @property - def project_outside(self): - """Project ID for testing inside access. - - Returns: - str: project ID used for testing inside access; None if undefined. - """ - return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) - - @property - def bucket_outside(self): - """GCS bucket for testing inside access. - - Returns: - str: bucket ID used for testing inside access; None if undefined. - """ - return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) - - def skip_if_inside_vpcsc(self, testcase): - """Test decorator: skip if running inside VPCSC.""" - reason = ( - "Running inside VPCSC. " - "Unset the {} environment variable to enable this test." - ).format(INSIDE_VPCSC_ENVVAR) - skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) - return skip(testcase) - - def skip_unless_inside_vpcsc(self, testcase): - """Test decorator: skip if running outside VPCSC.""" - reason = ( - "Running outside VPCSC. " - "Set the {} environment variable to enable this test." - ).format(INSIDE_VPCSC_ENVVAR) - skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) - return skip(testcase) - - def skip_unless_inside_project(self, testcase): - """Test decorator: skip if inside project env var not set.""" - reason = ( - "Project ID for running inside VPCSC not set. " - "Set the {} environment variable to enable this test." 
- ).format(PROJECT_INSIDE_ENVVAR) - skip = pytest.mark.skipif(self.project_inside is None, reason=reason) - return skip(testcase) - - def skip_unless_outside_project(self, testcase): - """Test decorator: skip if outside project env var not set.""" - reason = ( - "Project ID for running outside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(PROJECT_OUTSIDE_ENVVAR) - skip = pytest.mark.skipif(self.project_outside is None, reason=reason) - return skip(testcase) - - def skip_unless_outside_bucket(self, testcase): - """Test decorator: skip if outside bucket env var not set.""" - reason = ( - "Bucket ID for running outside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(BUCKET_OUTSIDE_ENVVAR) - skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason) - return skip(testcase) - - -vpcsc_config = VPCSCTestConfig() From c6af0a87f4672681935c7922def93b15470a1627 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 1 May 2020 17:08:21 -0700 Subject: [PATCH 263/611] chore: Migrate python-datastore synth.py from artman to bazel (#24) --- packages/google-cloud-datastore/synth.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index ff84827c949a..3dc0ea12744f 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -16,17 +16,16 @@ import synthtool as s from synthtool import gcp -gapic = gcp.GAPICGenerator() +gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() # ---------------------------------------------------------------------------- # Generate datastore GAPIC layer # ---------------------------------------------------------------------------- library = gapic.py_library( - "datastore", - "v1", - config_path="/google/datastore/artman_datastore.yaml", - artman_output_name="datastore-v1", + service="datastore", + version="v1", + bazel_target="//google/datastore/v1:datastore-v1-py", include_protos=True, ) From 943c753045ebd7bc07f43a0d79e2bb92228f1c73 Mon Sep 17 00:00:00 2001 From: Cameron Zahedi Date: Mon, 11 May 2020 16:29:50 -0600 Subject: [PATCH 264/611] feat: Create CODEOWNERS (#28) * Create CODEOWNERS Adding owner team * fix: Point to right org --- packages/google-cloud-datastore/.github/CODEOWNERS | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 packages/google-cloud-datastore/.github/CODEOWNERS diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS new file mode 100644 index 000000000000..39a8fc72bc4f --- /dev/null +++ b/packages/google-cloud-datastore/.github/CODEOWNERS @@ -0,0 +1,10 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +# The firestore-dpe team is the default owner for anything not +# explicitly taken by someone else. 
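+# (A bare "*" pattern matches every file in the repository.)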
+* @googleapis/firestore-dpe From 9b75295ec6b6e3b340d6c39a14bb4286828d023e Mon Sep 17 00:00:00 2001 From: Alex <7764119+AVaksman@users.noreply.github.com> Date: Tue, 16 Jun 2020 16:37:53 -0400 Subject: [PATCH 265/611] feat: add synth config to generate datastore_admin_v1 (#27) --- packages/google-cloud-datastore/synth.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 3dc0ea12744f..49e9f69495a4 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -32,6 +32,19 @@ s.move(library / "google/cloud/datastore_v1/proto") s.move(library / "google/cloud/datastore_v1/gapic") +# ---------------------------------------------------------------------------- +# Generate datastore admin GAPIC layer +# ---------------------------------------------------------------------------- +library = gapic.py_library( + service="datastore_admin", + version="v1", + bazel_target="//google/datastore/admin/v1:datastore-admin-v1-py", + include_protos=True, +) + +s.move(library / "google/cloud/datastore_admin_v1/proto") +s.move(library / "google/cloud/datastore_admin_v1/gapic") + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From bbb082a0252f78302a528078f5fb4141a9fdb16c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 1 Jul 2020 15:02:06 -0700 Subject: [PATCH 266/611] feat: add datastore admin client (#39) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/131dd10e-845c-4b68-acf1-3514a23e446f/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/cf2eff09d0f5319a4dc5cdce2b6356d85af4a798 Source-Link: https://github.com/googleapis/synthtool/commit/db4f15fedc76e04018e6e43ec403e804e1142158 PiperOrigin-RevId: 316182409 Source-Link: https://github.com/googleapis/googleapis/commit/184661793fbe3b89f2b485c303e7466cef9d21a1 PiperOrigin-RevId: 316124477 Source-Link: https://github.com/googleapis/googleapis/commit/c829fa0bfa725adaf20d82e86cbc1220e3ffd784 Source-Link: https://github.com/googleapis/synthtool/commit/4e1d2cb79b02d7496b1452f91c518630c207145e Source-Link: https://github.com/googleapis/synthtool/commit/e99975b6b49827b8720f0a885e218dbdb67849ca Source-Link: https://github.com/googleapis/synthtool/commit/ffe10407ee2f261c799fb0d01bf32a8abc67ed1e Source-Link: https://github.com/googleapis/synthtool/commit/71b8a272549c06b5768d00fa48d3ae990e871bec PiperOrigin-RevId: 313460921 Source-Link: https://github.com/googleapis/googleapis/commit/c4e37010d74071851ff24121f522e802231ac86e PiperOrigin-RevId: 312689208 Source-Link: https://github.com/googleapis/googleapis/commit/dec3204175104cef49bf21d685d5517caaf0058f Source-Link: https://github.com/googleapis/synthtool/commit/d2364eb80b840a36136c8ce12f1c6efabcc9600e PiperOrigin-RevId: 312088359 Source-Link: https://github.com/googleapis/googleapis/commit/5a90d467aa65e7f038f87585e8fbb45d74475e7c Source-Link: https://github.com/googleapis/synthtool/commit/fe8bbfdf2fef3bed8ebd461dcd33dfdd28604d02 Source-Link: https://github.com/googleapis/synthtool/commit/cb3433f7f554ea751584bdd3631d45ec56a32eb5 Source-Link: https://github.com/googleapis/synthtool/commit/7482e79a82e353248769d819788adc1213e8c207 PiperOrigin-RevId: 310619756 Source-Link: https://github.com/googleapis/googleapis/commit/482e5206e05e4ba8cd79738f6fb7a521c8c23555 Source-Link: https://github.com/googleapis/synthtool/commit/4674113712c0c7ada19e6a8219d7963ff174b392 PiperOrigin-RevId: 310060413 Source-Link: https://github.com/googleapis/googleapis/commit/a3a0bf0f6291d69f2ff3df7fcd63d28ee20ac727 Source-Link: https://github.com/googleapis/synthtool/commit/09c48461232ce929c34386259eb59018ad2d8eef Source-Link: https://github.com/googleapis/synthtool/commit/ea3ae392fced25d7d6f5c7ef9c8a1f24768a0936 PiperOrigin-RevId: 309845930 Source-Link: https://github.com/googleapis/googleapis/commit/1b5a8d2bbb69c5a04db26bd546d2888e609c6bab PiperOrigin-RevId: 309824146 Source-Link: https://github.com/googleapis/googleapis/commit/e0f9d9e1f9de890db765be46f45ca8490723e3eb --- packages/google-cloud-datastore/.flake8 | 2 + packages/google-cloud-datastore/.gitignore | 2 + .../.kokoro/publish-docs.sh | 2 - .../google-cloud-datastore/.kokoro/release.sh | 2 - .../.kokoro/samples/lint/common.cfg | 34 + .../.kokoro/samples/lint/continuous.cfg | 6 + .../.kokoro/samples/lint/periodic.cfg | 6 + .../.kokoro/samples/lint/presubmit.cfg | 6 + .../.kokoro/samples/python3.6/common.cfg | 34 + .../.kokoro/samples/python3.6/continuous.cfg | 7 + .../.kokoro/samples/python3.6/periodic.cfg | 6 + .../.kokoro/samples/python3.6/presubmit.cfg | 6 + .../.kokoro/samples/python3.7/common.cfg | 34 + .../.kokoro/samples/python3.7/continuous.cfg | 6 + .../.kokoro/samples/python3.7/periodic.cfg | 6 + .../.kokoro/samples/python3.7/presubmit.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 34 + .../.kokoro/samples/python3.8/continuous.cfg | 6 + .../.kokoro/samples/python3.8/periodic.cfg | 6 + .../.kokoro/samples/python3.8/presubmit.cfg | 6 + .../.kokoro/test-samples.sh | 104 + packages/google-cloud-datastore/MANIFEST.in | 3 + 
.../docs/_templates/layout.html | 4 +- .../datastore_admin_v1/gapic/__init__.py | 0 .../gapic/datastore_admin_client.py | 665 ++++++ .../gapic/datastore_admin_client_config.py | 43 + .../cloud/datastore_admin_v1/gapic/enums.py | 130 ++ .../gapic/transports/__init__.py | 0 .../datastore_admin_grpc_transport.py | 186 ++ .../datastore_admin_v1/proto/__init__.py | 0 .../proto/datastore_admin.proto | 425 ++++ .../proto/datastore_admin_pb2.py | 1847 +++++++++++++++++ .../proto/datastore_admin_pb2_grpc.py | 414 ++++ .../datastore_admin_v1/proto/index.proto | 115 + .../datastore_admin_v1/proto/index_pb2.py | 430 ++++ .../proto/index_pb2_grpc.py | 3 + .../datastore_v1/gapic/datastore_client.py | 238 +-- .../gapic/datastore_client_config.py | 10 +- .../google/cloud/datastore_v1/gapic/enums.py | 6 +- .../transports/datastore_grpc_transport.py | 36 +- .../cloud/datastore_v1/proto/datastore.proto | 108 +- .../cloud/datastore_v1/proto/datastore_pb2.py | 611 +++--- .../datastore_v1/proto/datastore_pb2_grpc.py | 252 ++- .../cloud/datastore_v1/proto/entity.proto | 11 +- .../cloud/datastore_v1/proto/entity_pb2.py | 216 +- .../datastore_v1/proto/entity_pb2_grpc.py | 1 + .../cloud/datastore_v1/proto/query.proto | 13 +- .../cloud/datastore_v1/proto/query_pb2.py | 356 ++-- .../datastore_v1/proto/query_pb2_grpc.py | 1 + packages/google-cloud-datastore/noxfile.py | 31 +- .../scripts/decrypt-secrets.sh | 33 + .../scripts/readme-gen/readme_gen.py | 66 + .../readme-gen/templates/README.tmpl.rst | 87 + .../readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 + .../templates/install_portaudio.tmpl.rst | 35 + .../google-cloud-datastore/synth.metadata | 27 +- packages/google-cloud-datastore/synth.py | 31 +- .../google-cloud-datastore/testing/.gitignore | 3 + .../tests/unit/test_client.py | 4 +- .../tests/unit/test_key.py | 2 +- 62 files changed, 6055 insertions(+), 761 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg create mode 100755 packages/google-cloud-datastore/.kokoro/test-samples.sh create mode 100644 
packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py create mode 100755 packages/google-cloud-datastore/scripts/decrypt-secrets.sh create mode 100644 packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py create mode 100644 packages/google-cloud-datastore/scripts/readme-gen/templates/README.tmpl.rst create mode 100644 packages/google-cloud-datastore/scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 packages/google-cloud-datastore/scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 packages/google-cloud-datastore/scripts/readme-gen/templates/install_portaudio.tmpl.rst create mode 100644 packages/google-cloud-datastore/testing/.gitignore diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 20fe9bda2ee4..ed9316381c9c 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -21,6 +21,8 @@ exclude = # Exclude generated code. **/proto/** **/gapic/** + **/services/** + **/types/** *_pb2.py # Standard linting exemptions. diff --git a/packages/google-cloud-datastore/.gitignore b/packages/google-cloud-datastore/.gitignore index 3fb06e09ce74..b87e1ed580d9 100644 --- a/packages/google-cloud-datastore/.gitignore +++ b/packages/google-cloud-datastore/.gitignore @@ -10,6 +10,7 @@ dist build eggs +.eggs parts bin var @@ -49,6 +50,7 @@ bigquery/docs/generated # Virtual environment env/ coverage.xml +sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index b478f4c4d34e..487d823ae057 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Disable buffering, so that the logs stream through. 
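To make the new surface in this commit concrete, here is a minimal, hypothetical driver for the generated admin client, mirroring the `export_entities` docstring example that appears further down in this patch. The project ID and bucket path are placeholders, and application-default credentials are assumed:

# A minimal sketch of exporting entities with the newly generated client.
from google.cloud import datastore_admin_v1

client = datastore_admin_v1.DatastoreAdminClient()

# export_entities returns a long-running operation future.
operation = client.export_entities(
    project_id='my-project',
    output_url_prefix='gs://my-bucket/datastore-exports',
)

# Block until the export finishes; output_url points at the export metadata
# that a later ImportEntities call would consume.
response = operation.result()
print(response.output_url)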
diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index 00df87ac5390..95282f08e11f 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Start the releasetool reporter diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg new file mode 100644 index 000000000000..33f7432a1209 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 000000000000..8417400266d0 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: 
"py-3.6" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 000000000000..7218af1499e5 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 000000000000..2122ef4c5421 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 000000000000..c4ca39f0b0b1 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
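+# (trampoline.sh starts the TRAMPOLINE_IMAGE container and runs the
+# TRAMPOLINE_BUILD_FILE configured above inside it.)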
+build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh new file mode 100755 index 000000000000..78494e1e5490 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-datastore + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE +fi + +# Disable buffering, so that the logs stream through. 
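+# (PYTHONUNBUFFERED applies to every Python process this script spawns.)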
+export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" \ No newline at end of file diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index 68855abc3f02..e9e29d12033d 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -20,3 +20,6 @@ recursive-include google *.json *.proto recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/_templates/layout.html b/packages/google-cloud-datastore/docs/_templates/layout.html index 228529efe2d2..6316a537f72b 100644 --- a/packages/google-cloud-datastore/docs/_templates/layout.html +++ b/packages/google-cloud-datastore/docs/_templates/layout.html @@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py new file mode 100644 index 000000000000..9495419eb3ea --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py @@ -0,0 +1,665 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Accesses the google.datastore.admin.v1 DatastoreAdmin API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import grpc + +from google.cloud.datastore_admin_v1.gapic import datastore_admin_client_config +from google.cloud.datastore_admin_v1.gapic import enums +from google.cloud.datastore_admin_v1.gapic.transports import ( + datastore_admin_grpc_transport, +) +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2 +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2_grpc +from google.cloud.datastore_admin_v1.proto import index_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-datastore-admin", +).version + + +class DatastoreAdminClient(object): + """ + Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. 
+ + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + SERVICE_ADDRESS = "datastore.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.datastore.admin.v1.DatastoreAdmin" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.DatastoreAdminGrpcTransport, + Callable[[~.Credentials, type], ~.DatastoreAdminGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. 
A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = datastore_admin_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=datastore_admin_grpc_transport.DatastoreAdminGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = datastore_admin_grpc_transport.DatastoreAdminGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials, + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME], + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def export_entities( + self, + project_id, + output_url_prefix, + labels=None, + entity_filter=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Exports a copy of all or a subset of entities from Google Cloud Datastore + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. 
+
+        Example:
+            >>> from google.cloud import datastore_admin_v1
+            >>>
+            >>> client = datastore_admin_v1.DatastoreAdminClient()
+            >>>
+            >>> # TODO: Initialize `project_id`:
+            >>> project_id = ''
+            >>>
+            >>> # TODO: Initialize `output_url_prefix`:
+            >>> output_url_prefix = ''
+            >>>
+            >>> response = client.export_entities(project_id, output_url_prefix)
+            >>>
+            >>> def callback(operation_future):
+            ...     # Handle result.
+            ...     result = operation_future.result()
+            >>>
+            >>> response.add_done_callback(callback)
+            >>>
+            >>> # Handle metadata.
+            >>> metadata = response.metadata()
+
+        Args:
+            project_id (str): Required. Project ID against which to make the request.
+            output_url_prefix (str): Required. Location for the export metadata and data files.
+
+                The full resource URL of the external storage location. Currently, only
+                Google Cloud Storage is supported. So output_url_prefix should be of the
+                form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` is
+                the name of the Cloud Storage bucket and ``NAMESPACE_PATH`` is an
+                optional Cloud Storage namespace path (this is not a Cloud Datastore
+                namespace). For more information about Cloud Storage namespace paths,
+                see `Object name
+                considerations <https://cloud.google.com/storage/docs/naming#object-considerations>`__.
+
+                The resulting files will be nested deeper than the specified URL prefix.
+                The final output URL will be provided in the
+                ``google.datastore.admin.v1.ExportEntitiesResponse.output_url`` field.
+                That value should be used for subsequent ImportEntities operations.
+
+                By nesting the data files deeper, the same Cloud Storage bucket can be
+                used in multiple ExportEntities operations without conflict.
+            labels (dict[str -> str]): Client-assigned labels.
+            entity_filter (Union[dict, ~google.cloud.datastore_admin_v1.types.EntityFilter]): Description of what data from the project is included in the export.
+
+                If a dict is provided, it must be of the same form as the protobuf
+                message :class:`~google.cloud.datastore_admin_v1.types.EntityFilter`
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will
+                be retried using a default configuration.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Returns:
+            A :class:`~google.cloud.datastore_admin_v1.types._OperationFuture` instance.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
+        """
+        # Wrap the transport method to add retry and timeout logic.
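Beyond the generated doctest above, a compact end-to-end sketch may help. The project, bucket, kind, and labels below are hypothetical; the entity_filter dict mirrors types.EntityFilter as described in the Args:

from google.cloud import datastore_admin_v1

client = datastore_admin_v1.DatastoreAdminClient()

# Kick off the export; this returns a google.api_core.operation.Operation
# (a future), not the final result.
operation = client.export_entities(
    project_id="my-project",                    # hypothetical
    output_url_prefix="gs://my-export-bucket",  # hypothetical bucket
    labels={"reason": "weekly-backup"},
    entity_filter={"kinds": ["Task"], "namespace_ids": [""]},
)

# Block until done; the response carries the final output_url, which is
# the value to feed into a later ImportEntities call.
response = operation.result()
print(response.output_url)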
+        if "export_entities" not in self._inner_api_calls:
+            self._inner_api_calls[
+                "export_entities"
+            ] = google.api_core.gapic_v1.method.wrap_method(
+                self.transport.export_entities,
+                default_retry=self._method_configs["ExportEntities"].retry,
+                default_timeout=self._method_configs["ExportEntities"].timeout,
+                client_info=self._client_info,
+            )
+
+        request = datastore_admin_pb2.ExportEntitiesRequest(
+            project_id=project_id,
+            output_url_prefix=output_url_prefix,
+            labels=labels,
+            entity_filter=entity_filter,
+        )
+        if metadata is None:
+            metadata = []
+        metadata = list(metadata)
+        try:
+            routing_header = [("project_id", project_id)]
+        except AttributeError:
+            pass
+        else:
+            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+                routing_header
+            )
+            metadata.append(routing_metadata)
+
+        operation = self._inner_api_calls["export_entities"](
+            request, retry=retry, timeout=timeout, metadata=metadata
+        )
+        return google.api_core.operation.from_gapic(
+            operation,
+            self.transport._operations_client,
+            datastore_admin_pb2.ExportEntitiesResponse,
+            metadata_type=datastore_admin_pb2.ExportEntitiesMetadata,
+        )
+
+    def import_entities(
+        self,
+        project_id,
+        input_url,
+        labels=None,
+        entity_filter=None,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+        """
+        Imports entities into Google Cloud Datastore. Existing entities with the
+        same key are overwritten. The import occurs in the background and its
+        progress can be monitored and managed via the Operation resource that is
+        created. If an ImportEntities operation is cancelled, it is possible
+        that a subset of the data has already been imported to Cloud Datastore.
+
+        Example:
+            >>> from google.cloud import datastore_admin_v1
+            >>>
+            >>> client = datastore_admin_v1.DatastoreAdminClient()
+            >>>
+            >>> # TODO: Initialize `project_id`:
+            >>> project_id = ''
+            >>>
+            >>> # TODO: Initialize `input_url`:
+            >>> input_url = ''
+            >>>
+            >>> response = client.import_entities(project_id, input_url)
+            >>>
+            >>> def callback(operation_future):
+            ...     # Handle result.
+            ...     result = operation_future.result()
+            >>>
+            >>> response.add_done_callback(callback)
+            >>>
+            >>> # Handle metadata.
+            >>> metadata = response.metadata()
+
+        Args:
+            project_id (str): Required. Project ID against which to make the request.
+            input_url (str): Required. The full resource URL of the external storage location.
+                Currently, only Google Cloud Storage is supported. So input_url should
+                be of the form:
+                ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``,
+                where ``BUCKET_NAME`` is the name of the Cloud Storage bucket,
+                ``NAMESPACE_PATH`` is an optional Cloud Storage namespace path (this is
+                not a Cloud Datastore namespace), and ``OVERALL_EXPORT_METADATA_FILE``
+                is the metadata file written by the ExportEntities operation. For more
+                information about Cloud Storage namespace paths, see `Object name
+                considerations <https://cloud.google.com/storage/docs/naming#object-considerations>`__.
+
+                For more information, see
+                ``google.datastore.admin.v1.ExportEntitiesResponse.output_url``.
+            labels (dict[str -> str]): Client-assigned labels.
+            entity_filter (Union[dict, ~google.cloud.datastore_admin_v1.types.EntityFilter]): Optionally specify which kinds/namespaces are to be imported. If
+                provided, the list must be a subset of the EntityFilter used in creating
+                the export, otherwise a FAILED_PRECONDITION error will be returned. If
+                no filter is specified then all entities from the export are imported.
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_admin_v1.types.EntityFilter` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_admin_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "import_entities" not in self._inner_api_calls: + self._inner_api_calls[ + "import_entities" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.import_entities, + default_retry=self._method_configs["ImportEntities"].retry, + default_timeout=self._method_configs["ImportEntities"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.ImportEntitiesRequest( + project_id=project_id, + input_url=input_url, + labels=labels, + entity_filter=entity_filter, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["import_entities"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=datastore_admin_pb2.ImportEntitiesMetadata, + ) + + def get_index( + self, + project_id=None, + index_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets an index. + + Example: + >>> from google.cloud import datastore_admin_v1 + >>> + >>> client = datastore_admin_v1.DatastoreAdminClient() + >>> + >>> response = client.get_index() + + Args: + project_id (str): Project ID against which to make the request. + index_id (str): The resource ID of the index to get. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_admin_v1.types.Index` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
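A matching import sketch, continuing the hypothetical export above; input_url comes from ExportEntitiesResponse.output_url, which points at the overall export metadata file in Cloud Storage:

# Continuing the export sketch: import the exported data back.
operation = client.import_entities(
    project_id="my-project",        # hypothetical
    input_url=response.output_url,
    # Must be a subset of the filter used for the export, else the
    # operation fails with FAILED_PRECONDITION.
    entity_filter={"kinds": ["Task"], "namespace_ids": [""]},
)
operation.result()  # response type is google.protobuf.Empty; raises on failure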
+ if "get_index" not in self._inner_api_calls: + self._inner_api_calls[ + "get_index" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_index, + default_retry=self._method_configs["GetIndex"].retry, + default_timeout=self._method_configs["GetIndex"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.GetIndexRequest( + project_id=project_id, index_id=index_id, + ) + return self._inner_api_calls["get_index"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_indexes( + self, + project_id=None, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists the indexes that match the specified filters. Datastore uses an + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. + + Example: + >>> from google.cloud import datastore_admin_v1 + >>> + >>> client = datastore_admin_v1.DatastoreAdminClient() + >>> + >>> # Iterate over all results + >>> for element in client.list_indexes(): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_indexes().pages: + ... for element in page: + ... # process element + ... pass + + Args: + project_id (str): Project ID against which to make the request. + filter_ (str) + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.datastore_admin_v1.types.Index` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
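Every generated method accepts ``retry`` and ``timeout``, as the docstrings above note. A sketch of overriding the defaults with the google-api-core helpers; the project and index IDs are hypothetical, and the numbers simply mirror the default retry_params in the client config file that follows:

from google.api_core import exceptions
from google.api_core import retry as retries

# Retry DEADLINE_EXCEEDED/UNAVAILABLE with exponential backoff:
# 100 ms initial delay, 1.3x growth, 60 s cap, 600 s overall deadline.
custom_retry = retries.Retry(
    predicate=retries.if_exception_type(
        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable
    ),
    initial=0.1,
    multiplier=1.3,
    maximum=60.0,
    deadline=600.0,
)

index = client.get_index(
    project_id="my-project",   # hypothetical
    index_id="composite-idx",  # hypothetical
    retry=custom_retry,
    timeout=20.0,  # applies to each individual attempt when retry is set
)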
+ if "list_indexes" not in self._inner_api_calls: + self._inner_api_calls[ + "list_indexes" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_indexes, + default_retry=self._method_configs["ListIndexes"].retry, + default_timeout=self._method_configs["ListIndexes"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.ListIndexesRequest( + project_id=project_id, filter=filter_, page_size=page_size, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_indexes"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="indexes", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py new file mode 100644 index 000000000000..dbbe2b85cc98 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py @@ -0,0 +1,43 @@ +config = { + "interfaces": { + "google.datastore.admin.v1.DatastoreAdmin": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "ExportEntities": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ImportEntities": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetIndex": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListIndexes": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py new file mode 100644 index 000000000000..77c303fc11b5 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
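The "default" retry_params in datastore_admin_client_config.py above encode an exponential backoff. A small sketch of the schedule they imply, ignoring time spent inside the RPC attempts themselves:

# Delays start at 100 ms, grow 1.3x per attempt, and cap at 60 s, until
# the 600 s total timeout budget is exhausted.
delay_s, budget_s, attempt = 0.1, 600.0, 1
while budget_s > 0 and attempt <= 6:  # first six attempts, for illustration
    print("attempt %d: wait up to %.2fs before retrying" % (attempt, delay_s))
    budget_s -= delay_s
    delay_s = min(delay_s * 1.3, 60.0)
    attempt += 1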
+
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
+class OperationType(enum.IntEnum):
+    """
+    Operation types.
+
+    Attributes:
+      OPERATION_TYPE_UNSPECIFIED (int): Unspecified.
+      EXPORT_ENTITIES (int): ExportEntities.
+      IMPORT_ENTITIES (int): ImportEntities.
+      CREATE_INDEX (int): CreateIndex.
+      DELETE_INDEX (int): DeleteIndex.
+    """
+
+    OPERATION_TYPE_UNSPECIFIED = 0
+    EXPORT_ENTITIES = 1
+    IMPORT_ENTITIES = 2
+    CREATE_INDEX = 3
+    DELETE_INDEX = 4
+
+
+class CommonMetadata(object):
+    class State(enum.IntEnum):
+        """
+        The various possible states for an ongoing Operation.
+
+        Attributes:
+          STATE_UNSPECIFIED (int): Unspecified.
+          INITIALIZING (int): Request is being prepared for processing.
+          PROCESSING (int): Request is actively being processed.
+          CANCELLING (int): Request is in the process of being cancelled after user called
+          google.longrunning.Operations.CancelOperation on the operation.
+          FINALIZING (int): Request has been processed and is in its finalization stage.
+          SUCCESSFUL (int): Request has completed successfully.
+          FAILED (int): Request has finished being processed, but encountered an error.
+          CANCELLED (int): Request has finished being cancelled after user called
+          google.longrunning.Operations.CancelOperation.
+        """
+
+        STATE_UNSPECIFIED = 0
+        INITIALIZING = 1
+        PROCESSING = 2
+        CANCELLING = 3
+        FINALIZING = 4
+        SUCCESSFUL = 5
+        FAILED = 6
+        CANCELLED = 7
+
+
+class Index(object):
+    class AncestorMode(enum.IntEnum):
+        """
+        For an ordered index, specifies whether each of the entity's ancestors
+        will be included.
+
+        Attributes:
+          ANCESTOR_MODE_UNSPECIFIED (int): The ancestor mode is unspecified.
+          NONE (int): Do not include the entity's ancestors in the index.
+          ALL_ANCESTORS (int): Include all the entity's ancestors in the index.
+        """
+
+        ANCESTOR_MODE_UNSPECIFIED = 0
+        NONE = 1
+        ALL_ANCESTORS = 2
+
+    class Direction(enum.IntEnum):
+        """
+        The direction determines how a property is indexed.
+
+        Attributes:
+          DIRECTION_UNSPECIFIED (int): The direction is unspecified.
+          ASCENDING (int): The property's values are indexed so as to support sequencing in
+          ascending order and also query by <, >, <=, >=, and =.
+          DESCENDING (int): The property's values are indexed so as to support sequencing in
+          descending order and also query by <, >, <=, >=, and =.
+        """
+
+        DIRECTION_UNSPECIFIED = 0
+        ASCENDING = 1
+        DESCENDING = 2
+
+    class State(enum.IntEnum):
+        """
+        The possible set of states of an index.
+
+        Attributes:
+          STATE_UNSPECIFIED (int): The state is unspecified.
+          CREATING (int): The index is being created, and cannot be used by queries.
+          There is an active long-running operation for the index.
+          The index is updated when writing an entity.
+          Some index data may exist.
+          READY (int): The index is ready to be used.
+          The index is updated when writing an entity.
+          The index is fully populated from all stored entities it applies to.
+          DELETING (int): The index is being deleted, and cannot be used by queries.
+          There is an active long-running operation for the index.
+          The index is not updated when writing an entity.
+          Some index data may exist.
+          ERROR (int): The index was being created or deleted, but something went wrong.
+          The index cannot be used by queries.
+          There is no active long-running operation for the index,
+          and the most recently finished long-running operation failed.
+          The index is not updated when writing an entity.
+          Some index data may exist.
+ """ + + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + ERROR = 4 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py new file mode 100644 index 000000000000..11fd92af67d4 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2_grpc + + +class DatastoreAdminGrpcTransport(object): + """gRPC transport class providing stubs for + google.datastore.admin.v1 DatastoreAdmin API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ) + + def __init__( + self, channel=None, credentials=None, address="datastore.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive.", + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. 
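A sketch of wiring up a custom channel, assuming application-default credentials. The classmethod create_channel (defined below) applies the OAuth scopes listed above; note that the unlimited message-size options are added only when __init__ builds its own channel:

from google.cloud import datastore_admin_v1
from google.cloud.datastore_admin_v1.gapic.transports import (
    datastore_admin_grpc_transport,
)

# Build a channel with the service's OAuth scopes, then hand the whole
# transport to the client; channel and credentials are mutually exclusive.
channel = datastore_admin_grpc_transport.DatastoreAdminGrpcTransport.create_channel(
    address="datastore.googleapis.com:443",
)
transport = datastore_admin_grpc_transport.DatastoreAdminGrpcTransport(channel=channel)
client = datastore_admin_v1.DatastoreAdminClient(transport=transport)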
+ self._stubs = { + "datastore_admin_stub": datastore_admin_pb2_grpc.DatastoreAdminStub( + channel + ), + } + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="datastore.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def export_entities(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.export_entities`. + + Exports a copy of all or a subset of entities from Google Cloud Datastore + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].ExportEntities + + @property + def import_entities(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.import_entities`. + + Imports entities into Google Cloud Datastore. Existing entities with the + same key are overwritten. The import occurs in the background and its + progress can be monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it is possible + that a subset of the data has already been imported to Cloud Datastore. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].ImportEntities + + @property + def get_index(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.get_index`. + + Gets an index. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].GetIndex + + @property + def list_indexes(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.list_indexes`. + + Lists the indexes that match the specified filters. Datastore uses an + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].ListIndexes diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto new file mode 100644 index 000000000000..c0f470766511 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto @@ -0,0 +1,425 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.admin.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/datastore/admin/v1/index.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.Admin.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin"; +option java_multiple_files = true; +option java_outer_classname = "DatastoreAdminProto"; +option java_package = "com.google.datastore.admin.v1"; +option ruby_package = "Google::Cloud::Datastore::Admin::V1"; + +// Google Cloud Datastore Admin API +// +// +// The Datastore Admin API provides several admin services for Cloud Datastore. +// +// ----------------------------------------------------------------------------- +// ## Concepts +// +// Project, namespace, kind, and entity as defined in the Google Cloud Datastore +// API. +// +// Operation: An Operation represents work being performed in the background. +// +// EntityFilter: Allows specifying a subset of entities in a project. This is +// specified as a combination of kinds and namespaces (either or both of which +// may be all). +// +// ----------------------------------------------------------------------------- +// ## Services +// +// # Export/Import +// +// The Export/Import service provides the ability to copy all or a subset of +// entities to/from Google Cloud Storage. +// +// Exported data may be imported into Cloud Datastore for any Google Cloud +// Platform project. It is not restricted to the export source project. It is +// possible to export from one project and then import into another. +// +// Exported data can also be loaded into Google BigQuery for analysis. +// +// Exports and imports are performed asynchronously. An Operation resource is +// created for each export/import. The state (including any errors encountered) +// of the export/import may be queried via the Operation resource. 
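On the Python side, querying that Operation resource amounts to inspecting the operation future returned by export_entities or import_entities. A sketch, assuming `operation` came from the export sketch earlier in this patch:

# Poll the long-running operation for progress and state.
metadata = operation.metadata  # an ExportEntitiesMetadata message
print(metadata.common.state)   # CommonMetadata.State value, e.g. 2 (PROCESSING)
print(metadata.progress_entities.work_completed,
      "of ~", metadata.progress_entities.work_estimated, "entities")

if not operation.done():
    # Cancellation is asynchronous; a cancelled export may leave partial
    # data behind in Cloud Storage, as noted above.
    operation.cancel()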
+// +// # Index +// +// The index service manages Cloud Datastore composite indexes. +// +// Index creation and deletion are performed asynchronously. +// An Operation resource is created for each such asynchronous operation. +// The state of the operation (including any errors encountered) +// may be queried via the Operation resource. +// +// # Operation +// +// The Operations collection provides a record of actions performed for the +// specified project (including any operations in progress). Operations are not +// created directly but through calls on other collections or resources. +// +// An operation that is not yet done may be cancelled. The request to cancel is +// asynchronous and the operation may continue to run for some time after the +// request to cancel is made. +// +// An operation that is done may be deleted so that it is no longer listed as +// part of the Operation collection. +// +// ListOperations returns all pending operations, but not completed operations. +// +// Operations are created by service DatastoreAdmin, +// but are accessed via service google.longrunning.Operations. +service DatastoreAdmin { + option (google.api.default_host) = "datastore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + rpc ExportEntities(ExportEntitiesRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:export" + body: "*" + }; + option (google.api.method_signature) = "project_id,labels,entity_filter,output_url_prefix"; + option (google.longrunning.operation_info) = { + response_type: "ExportEntitiesResponse" + metadata_type: "ExportEntitiesMetadata" + }; + } + + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. + rpc ImportEntities(ImportEntitiesRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:import" + body: "*" + }; + option (google.api.method_signature) = "project_id,labels,input_url,entity_filter"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "ImportEntitiesMetadata" + }; + } + + // Gets an index. + rpc GetIndex(GetIndexRequest) returns (Index) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/indexes/{index_id}" + }; + } + + // Lists the indexes that match the specified filters. Datastore uses an + // eventually consistent query to fetch the list of indexes and may + // occasionally return stale results. 
+  rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) {
+    option (google.api.http) = {
+      get: "/v1/projects/{project_id}/indexes"
+    };
+  }
+}
+
+// Metadata common to all Datastore Admin operations.
+message CommonMetadata {
+  // The various possible states for an ongoing Operation.
+  enum State {
+    // Unspecified.
+    STATE_UNSPECIFIED = 0;
+
+    // Request is being prepared for processing.
+    INITIALIZING = 1;
+
+    // Request is actively being processed.
+    PROCESSING = 2;
+
+    // Request is in the process of being cancelled after user called
+    // google.longrunning.Operations.CancelOperation on the operation.
+    CANCELLING = 3;
+
+    // Request has been processed and is in its finalization stage.
+    FINALIZING = 4;
+
+    // Request has completed successfully.
+    SUCCESSFUL = 5;
+
+    // Request has finished being processed, but encountered an error.
+    FAILED = 6;
+
+    // Request has finished being cancelled after user called
+    // google.longrunning.Operations.CancelOperation.
+    CANCELLED = 7;
+  }
+
+  // The time that work began on the operation.
+  google.protobuf.Timestamp start_time = 1;
+
+  // The time the operation ended, either successfully or otherwise.
+  google.protobuf.Timestamp end_time = 2;
+
+  // The type of the operation. Can be used as a filter in
+  // ListOperationsRequest.
+  OperationType operation_type = 3;
+
+  // The client-assigned labels which were provided when the operation was
+  // created. May also include additional labels.
+  map<string, string> labels = 4;
+
+  // The current state of the Operation.
+  State state = 5;
+}
+
+// Operation types.
+enum OperationType {
+  // Unspecified.
+  OPERATION_TYPE_UNSPECIFIED = 0;
+
+  // ExportEntities.
+  EXPORT_ENTITIES = 1;
+
+  // ImportEntities.
+  IMPORT_ENTITIES = 2;
+
+  // CreateIndex.
+  CREATE_INDEX = 3;
+
+  // DeleteIndex.
+  DELETE_INDEX = 4;
+}
+
+// Measures the progress of a particular metric.
+message Progress {
+  // The amount of work that has been completed. Note that this may be greater
+  // than work_estimated.
+  int64 work_completed = 1;
+
+  // An estimate of how much work needs to be performed. May be zero if the
+  // work estimate is unavailable.
+  int64 work_estimated = 2;
+}
+
+// The request for
+// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities].
+message ExportEntitiesRequest {
+  // Required. Project ID against which to make the request.
+  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Client-assigned labels.
+  map<string, string> labels = 2;
+
+  // Description of what data from the project is included in the export.
+  EntityFilter entity_filter = 3;
+
+  // Required. Location for the export metadata and data files.
+  //
+  // The full resource URL of the external storage location. Currently, only
+  // Google Cloud Storage is supported. So output_url_prefix should be of the
+  // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the
+  // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud
+  // Storage namespace path (this is not a Cloud Datastore namespace). For more
+  // information about Cloud Storage namespace paths, see
+  // [Object name
+  // considerations](https://cloud.google.com/storage/docs/naming#object-considerations).
+  //
+  // The resulting files will be nested deeper than the specified URL prefix.
+  // The final output URL will be provided in the
+  // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] field. That
+  // value should be used for subsequent ImportEntities operations.
+  //
+  // By nesting the data files deeper, the same Cloud Storage bucket can be used
+  // in multiple ExportEntities operations without conflict.
+  string output_url_prefix = 4 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The request for
+// [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities].
+message ImportEntitiesRequest {
+  // Required. Project ID against which to make the request.
+  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Client-assigned labels.
+  map<string, string> labels = 2;
+
+  // Required. The full resource URL of the external storage location. Currently, only
+  // Google Cloud Storage is supported. So input_url should be of the form:
+  // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where
+  // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is
+  // an optional Cloud Storage namespace path (this is not a Cloud Datastore
+  // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written
+  // by the ExportEntities operation. For more information about Cloud Storage
+  // namespace paths, see
+  // [Object name
+  // considerations](https://cloud.google.com/storage/docs/naming#object-considerations).
+  //
+  // For more information, see
+  // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+  string input_url = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Optionally specify which kinds/namespaces are to be imported. If provided,
+  // the list must be a subset of the EntityFilter used in creating the export,
+  // otherwise a FAILED_PRECONDITION error will be returned. If no filter is
+  // specified then all entities from the export are imported.
+  EntityFilter entity_filter = 4;
+}
+
+// The response for
+// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities].
+message ExportEntitiesResponse {
+  // Location of the output metadata file. This can be used to begin an import
+  // into Cloud Datastore (this project or another project). See
+  // [google.datastore.admin.v1.ImportEntitiesRequest.input_url][google.datastore.admin.v1.ImportEntitiesRequest.input_url].
+  // Only present if the operation completed successfully.
+  string output_url = 1;
+}
+
+// Metadata for ExportEntities operations.
+message ExportEntitiesMetadata {
+  // Metadata common to all Datastore Admin operations.
+  CommonMetadata common = 1;
+
+  // An estimate of the number of entities processed.
+  Progress progress_entities = 2;
+
+  // An estimate of the number of bytes processed.
+  Progress progress_bytes = 3;
+
+  // Description of which entities are being exported.
+  EntityFilter entity_filter = 4;
+
+  // Location for the export metadata and data files. This will be the same
+  // value as the
+  // [google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix]
+  // field. The final output location is provided in
+  // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+  string output_url_prefix = 5;
+}
+
+// Metadata for ImportEntities operations.
+message ImportEntitiesMetadata {
+  // Metadata common to all Datastore Admin operations.
+  CommonMetadata common = 1;
+
+  // An estimate of the number of entities processed.
+ Progress progress_entities = 2; + + // An estimate of the number of bytes processed. + Progress progress_bytes = 3; + + // Description of which entities are being imported. + EntityFilter entity_filter = 4; + + // The location of the import metadata file. This will be the same value as + // the [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] field. + string input_url = 5; +} + +// Identifies a subset of entities in a project. This is specified as +// combinations of kinds and namespaces (either or both of which may be all, as +// described in the following examples). +// Example usage: +// +// Entire project: +// kinds=[], namespace_ids=[] +// +// Kinds Foo and Bar in all namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=[] +// +// Kinds Foo and Bar only in the default namespace: +// kinds=['Foo', 'Bar'], namespace_ids=[''] +// +// Kinds Foo and Bar in both the default and Baz namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] +// +// The entire Baz namespace: +// kinds=[], namespace_ids=['Baz'] +message EntityFilter { + // If empty, then this represents all kinds. + repeated string kinds = 1; + + // An empty list represents all namespaces. This is the preferred + // usage for projects that don't use namespaces. + // + // An empty string element represents the default namespace. This should be + // used if the project has data in non-default namespaces, but doesn't want to + // include them. + // Each namespace in this list must be unique. + repeated string namespace_ids = 2; +} + +// The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. +message GetIndexRequest { + // Project ID against which to make the request. + string project_id = 1; + + // The resource ID of the index to get. + string index_id = 3; +} + +// The request for +// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. +message ListIndexesRequest { + // Project ID against which to make the request. + string project_id = 1; + + string filter = 3; + + // The maximum number of items to return. If zero, then all results will be + // returned. + int32 page_size = 4; + + // The next_page_token value returned from a previous List request, if any. + string page_token = 5; +} + +// The response for +// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. +message ListIndexesResponse { + // The indexes. + repeated Index indexes = 1; + + // The standard List next-page token. + string next_page_token = 2; +} + +// Metadata for Index operations. +message IndexOperationMetadata { + // Metadata common to all Datastore Admin operations. + CommonMetadata common = 1; + + // An estimate of the number of entities processed. + Progress progress_entities = 2; + + // The index resource ID that this operation is acting on. + string index_id = 3; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py new file mode 100644 index 000000000000..f16463bb0475 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py @@ -0,0 +1,1847 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/datastore_admin_v1/proto/datastore_admin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.datastore_admin_v1.proto import ( + index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/datastore_admin_v1/proto/datastore_admin.proto", + package="google.datastore.admin.v1", + syntax="proto3", + serialized_options=b"\n\035com.google.datastore.admin.v1B\023DatastoreAdminProtoP\001Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\252\002\037Google.Cloud.Datastore.Admin.V1\352\002#Google::Cloud::Datastore::Admin::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n;google/cloud/datastore_admin_v1/proto/datastore_admin.proto\x12\x19google.datastore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x31google/cloud/datastore_admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf4\x03\n\x0e\x43ommonMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x0eoperation_type\x18\x03 \x01(\x0e\x32(.google.datastore.admin.v1.OperationType\x12\x45\n\x06labels\x18\x04 \x03(\x0b\x32\x35.google.datastore.admin.v1.CommonMetadata.LabelsEntry\x12>\n\x05state\x18\x05 \x01(\x0e\x32/.google.datastore.admin.v1.CommonMetadata.State\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x8b\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\x0e\n\nCANCELLING\x10\x03\x12\x0e\n\nFINALIZING\x10\x04\x12\x0e\n\nSUCCESSFUL\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x12\r\n\tCANCELLED\x10\x07":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03"\x8d\x02\n\x15\x45xportEntitiesRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x06labels\x18\x02 \x03(\x0b\x32<.google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry\x12>\n\rentity_filter\x18\x03 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x1e\n\x11output_url_prefix\x18\x04 \x01(\tB\x03\xe0\x41\x02\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x85\x02\n\x15ImportEntitiesRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x06labels\x18\x02 \x03(\x0b\x32<.google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry\x12\x16\n\tinput_url\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12>\n\rentity_filter\x18\x04 
\x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x16\x45xportEntitiesResponse\x12\x12\n\noutput_url\x18\x01 \x01(\t"\xab\x02\n\x16\x45xportEntitiesMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x03 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x19\n\x11output_url_prefix\x18\x05 \x01(\t"\xa3\x02\n\x16ImportEntitiesMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x03 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x11\n\tinput_url\x18\x05 \x01(\t"4\n\x0c\x45ntityFilter\x12\r\n\x05kinds\x18\x01 \x03(\t\x12\x15\n\rnamespace_ids\x18\x02 \x03(\t"7\n\x0fGetIndexRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x10\n\x08index_id\x18\x03 \x01(\t"_\n\x12ListIndexesRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.datastore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xa5\x01\n\x16IndexOperationMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12\x10\n\x08index_id\x18\x03 \x01(\t*}\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x13\n\x0f\x45XPORT_ENTITIES\x10\x01\x12\x13\n\x0fIMPORT_ENTITIES\x10\x02\x12\x10\n\x0c\x43REATE_INDEX\x10\x03\x12\x10\n\x0c\x44\x45LETE_INDEX\x10\x04\x32\x9c\x07\n\x0e\x44\x61tastoreAdmin\x12\xf6\x01\n\x0e\x45xportEntities\x12\x30.google.datastore.admin.v1.ExportEntitiesRequest\x1a\x1d.google.longrunning.Operation"\x92\x01\x82\xd3\xe4\x93\x02%" /v1/projects/{project_id}:export:\x01*\xda\x41\x31project_id,labels,entity_filter,output_url_prefix\xca\x41\x30\n\x16\x45xportEntitiesResponse\x12\x16\x45xportEntitiesMetadata\x12\xed\x01\n\x0eImportEntities\x12\x30.google.datastore.admin.v1.ImportEntitiesRequest\x1a\x1d.google.longrunning.Operation"\x89\x01\x82\xd3\xe4\x93\x02%" /v1/projects/{project_id}:import:\x01*\xda\x41)project_id,labels,input_url,entity_filter\xca\x41/\n\x15google.protobuf.Empty\x12\x16ImportEntitiesMetadata\x12\x8e\x01\n\x08GetIndex\x12*.google.datastore.admin.v1.GetIndexRequest\x1a .google.datastore.admin.v1.Index"4\x82\xd3\xe4\x93\x02.\x12,/v1/projects/{project_id}/indexes/{index_id}\x12\x97\x01\n\x0bListIndexes\x12-.google.datastore.admin.v1.ListIndexesRequest\x1a..google.datastore.admin.v1.ListIndexesResponse")\x82\xd3\xe4\x93\x02#\x12!/v1/projects/{project_id}/indexes\x1av\xca\x41\x18\x64\x61tastore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xbe\x01\n\x1d\x63om.google.datastore.admin.v1B\x13\x44\x61tastoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\xaa\x02\x1fGoogle.Cloud.Datastore.Admin.V1\xea\x02#Google::Cloud::Datastore::Admin::V1b\x06proto3', + dependencies=[ + 
google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + +_OPERATIONTYPE = _descriptor.EnumDescriptor( + name="OperationType", + full_name="google.datastore.admin.v1.OperationType", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATION_TYPE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="EXPORT_ENTITIES", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IMPORT_ENTITIES", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CREATE_INDEX", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DELETE_INDEX", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2515, + serialized_end=2640, +) +_sym_db.RegisterEnumDescriptor(_OPERATIONTYPE) + +OperationType = enum_type_wrapper.EnumTypeWrapper(_OPERATIONTYPE) +OPERATION_TYPE_UNSPECIFIED = 0 +EXPORT_ENTITIES = 1 +IMPORT_ENTITIES = 2 +CREATE_INDEX = 3 +DELETE_INDEX = 4 + + +_COMMONMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.datastore.admin.v1.CommonMetadata.State", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INITIALIZING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="PROCESSING", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FINALIZING", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="SUCCESSFUL", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FAILED", + index=6, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", + index=7, + number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=661, + serialized_end=800, +) +_sym_db.RegisterEnumDescriptor(_COMMONMETADATA_STATE) + + 
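The module-level constants and EnumTypeWrapper registered above are used directly; a small sketch:

from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2

# The wrapper exposes Name()/Value() lookups alongside the module-level
# integer constants registered by _sym_db above.
assert datastore_admin_pb2.OperationType.Name(1) == "EXPORT_ENTITIES"
assert datastore_admin_pb2.OperationType.Value("EXPORT_ENTITIES") == 1
assert datastore_admin_pb2.EXPORT_ENTITIES == 1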
+_COMMONMETADATA_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=613, + serialized_end=658, +) + +_COMMONMETADATA = _descriptor.Descriptor( + name="CommonMetadata", + full_name="google.datastore.admin.v1.CommonMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.datastore.admin.v1.CommonMetadata.start_time", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.datastore.admin.v1.CommonMetadata.end_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="operation_type", + full_name="google.datastore.admin.v1.CommonMetadata.operation_type", + index=2, + number=3, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.datastore.admin.v1.CommonMetadata.labels", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.datastore.admin.v1.CommonMetadata.state", + index=4, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + 
default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_COMMONMETADATA_LABELSENTRY,], + enum_types=[_COMMONMETADATA_STATE,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=300, + serialized_end=800, +) + + +_PROGRESS = _descriptor.Descriptor( + name="Progress", + full_name="google.datastore.admin.v1.Progress", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="work_completed", + full_name="google.datastore.admin.v1.Progress.work_completed", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="work_estimated", + full_name="google.datastore.admin.v1.Progress.work_estimated", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=802, + serialized_end=860, +) + + +_EXPORTENTITIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=613, + serialized_end=658, +) + +_EXPORTENTITIESREQUEST = _descriptor.Descriptor( + name="ExportEntitiesRequest", + full_name="google.datastore.admin.v1.ExportEntitiesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + 
full_name="google.datastore.admin.v1.ExportEntitiesRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.labels", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.entity_filter", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output_url_prefix", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_EXPORTENTITIESREQUEST_LABELSENTRY,], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=863, + serialized_end=1132, +) + + +_IMPORTENTITIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=613, + serialized_end=658, +) + +_IMPORTENTITIESREQUEST = _descriptor.Descriptor( + name="ImportEntitiesRequest", + 
full_name="google.datastore.admin.v1.ImportEntitiesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.labels", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="input_url", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.input_url", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.entity_filter", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_IMPORTENTITIESREQUEST_LABELSENTRY,], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1135, + serialized_end=1396, +) + + +_EXPORTENTITIESRESPONSE = _descriptor.Descriptor( + name="ExportEntitiesResponse", + full_name="google.datastore.admin.v1.ExportEntitiesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="output_url", + full_name="google.datastore.admin.v1.ExportEntitiesResponse.output_url", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1398, + serialized_end=1442, +) + + +_EXPORTENTITIESMETADATA = _descriptor.Descriptor( + name="ExportEntitiesMetadata", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="common", + 
full_name="google.datastore.admin.v1.ExportEntitiesMetadata.common", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_entities", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.progress_entities", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_bytes", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.progress_bytes", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.entity_filter", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output_url_prefix", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.output_url_prefix", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1445, + serialized_end=1744, +) + + +_IMPORTENTITIESMETADATA = _descriptor.Descriptor( + name="ImportEntitiesMetadata", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="common", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.common", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_entities", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.progress_entities", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_bytes", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.progress_bytes", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.entity_filter", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="input_url", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.input_url", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1747, + serialized_end=2038, +) + + +_ENTITYFILTER = _descriptor.Descriptor( + name="EntityFilter", + full_name="google.datastore.admin.v1.EntityFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="kinds", + full_name="google.datastore.admin.v1.EntityFilter.kinds", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="namespace_ids", + full_name="google.datastore.admin.v1.EntityFilter.namespace_ids", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2040, + serialized_end=2092, +) + + +_GETINDEXREQUEST = _descriptor.Descriptor( + name="GetIndexRequest", + full_name="google.datastore.admin.v1.GetIndexRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.GetIndexRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="index_id", + full_name="google.datastore.admin.v1.GetIndexRequest.index_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2094, + serialized_end=2149, +) + + +_LISTINDEXESREQUEST = _descriptor.Descriptor( + name="ListIndexesRequest", + full_name="google.datastore.admin.v1.ListIndexesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.ListIndexesRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.datastore.admin.v1.ListIndexesRequest.filter", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.datastore.admin.v1.ListIndexesRequest.page_size", + index=2, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.datastore.admin.v1.ListIndexesRequest.page_token", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2151, + serialized_end=2246, +) + + +_LISTINDEXESRESPONSE = _descriptor.Descriptor( + name="ListIndexesResponse", + full_name="google.datastore.admin.v1.ListIndexesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="indexes", + full_name="google.datastore.admin.v1.ListIndexesResponse.indexes", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.datastore.admin.v1.ListIndexesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2248, + serialized_end=2345, +) + + +_INDEXOPERATIONMETADATA = _descriptor.Descriptor( + name="IndexOperationMetadata", + full_name="google.datastore.admin.v1.IndexOperationMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="common", + full_name="google.datastore.admin.v1.IndexOperationMetadata.common", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_entities", + full_name="google.datastore.admin.v1.IndexOperationMetadata.progress_entities", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="index_id", + full_name="google.datastore.admin.v1.IndexOperationMetadata.index_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2348, + serialized_end=2513, +) + +_COMMONMETADATA_LABELSENTRY.containing_type = _COMMONMETADATA +_COMMONMETADATA.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_COMMONMETADATA.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_COMMONMETADATA.fields_by_name["operation_type"].enum_type = _OPERATIONTYPE +_COMMONMETADATA.fields_by_name["labels"].message_type = _COMMONMETADATA_LABELSENTRY +_COMMONMETADATA.fields_by_name["state"].enum_type = _COMMONMETADATA_STATE +_COMMONMETADATA_STATE.containing_type = _COMMONMETADATA +_EXPORTENTITIESREQUEST_LABELSENTRY.containing_type = _EXPORTENTITIESREQUEST +_EXPORTENTITIESREQUEST.fields_by_name[ + "labels" +].message_type = _EXPORTENTITIESREQUEST_LABELSENTRY +_EXPORTENTITIESREQUEST.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_IMPORTENTITIESREQUEST_LABELSENTRY.containing_type = 
_IMPORTENTITIESREQUEST +_IMPORTENTITIESREQUEST.fields_by_name[ + "labels" +].message_type = _IMPORTENTITIESREQUEST_LABELSENTRY +_IMPORTENTITIESREQUEST.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_EXPORTENTITIESMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA +_EXPORTENTITIESMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS +_EXPORTENTITIESMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS +_EXPORTENTITIESMETADATA.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_IMPORTENTITIESMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA +_IMPORTENTITIESMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS +_IMPORTENTITIESMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS +_IMPORTENTITIESMETADATA.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_LISTINDEXESRESPONSE.fields_by_name[ + "indexes" +].message_type = ( + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2._INDEX +) +_INDEXOPERATIONMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA +_INDEXOPERATIONMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS +DESCRIPTOR.message_types_by_name["CommonMetadata"] = _COMMONMETADATA +DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS +DESCRIPTOR.message_types_by_name["ExportEntitiesRequest"] = _EXPORTENTITIESREQUEST +DESCRIPTOR.message_types_by_name["ImportEntitiesRequest"] = _IMPORTENTITIESREQUEST +DESCRIPTOR.message_types_by_name["ExportEntitiesResponse"] = _EXPORTENTITIESRESPONSE +DESCRIPTOR.message_types_by_name["ExportEntitiesMetadata"] = _EXPORTENTITIESMETADATA +DESCRIPTOR.message_types_by_name["ImportEntitiesMetadata"] = _IMPORTENTITIESMETADATA +DESCRIPTOR.message_types_by_name["EntityFilter"] = _ENTITYFILTER +DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST +DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST +DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE +DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA +DESCRIPTOR.enum_types_by_name["OperationType"] = _OPERATIONTYPE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CommonMetadata = _reflection.GeneratedProtocolMessageType( + "CommonMetadata", + (_message.Message,), + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + { + "DESCRIPTOR": _COMMONMETADATA_LABELSENTRY, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.CommonMetadata.LabelsEntry) + }, + ), + "DESCRIPTOR": _COMMONMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata common to all Datastore Admin operations. + + Attributes: + start_time: + The time that work began on the operation. + end_time: + The time the operation ended, either successfully or + otherwise. + operation_type: + The type of the operation. Can be used as a filter in + ListOperationsRequest. + labels: + The client-assigned labels which were provided when the + operation was created. May also include additional labels. + state: + The current state of the Operation. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.CommonMetadata) + }, +) +_sym_db.RegisterMessage(CommonMetadata) +_sym_db.RegisterMessage(CommonMetadata.LabelsEntry) + +Progress = _reflection.GeneratedProtocolMessageType( + "Progress", + (_message.Message,), + { + "DESCRIPTOR": _PROGRESS, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Measures the progress of a particular metric. + + Attributes: + work_completed: + The amount of work that has been completed. Note that this may + be greater than work_estimated. + work_estimated: + An estimate of how much work needs to be performed. May be + zero if the work estimate is unavailable. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Progress) + }, +) +_sym_db.RegisterMessage(Progress) + +ExportEntitiesRequest = _reflection.GeneratedProtocolMessageType( + "ExportEntitiesRequest", + (_message.Message,), + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + { + "DESCRIPTOR": _EXPORTENTITIESREQUEST_LABELSENTRY, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry) + }, + ), + "DESCRIPTOR": _EXPORTENTITIESREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntiti + es][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + Attributes: + project_id: + Required. Project ID against which to make the request. + labels: + Client-assigned labels. + entity_filter: + Description of what data from the project is included in the + export. + output_url_prefix: + Required. Location for the export metadata and data files. + The full resource URL of the external storage location. + Currently, only Google Cloud Storage is supported. So + output_url_prefix should be of the form: + ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` + is the name of the Cloud Storage bucket and ``NAMESPACE_PATH`` + is an optional Cloud Storage namespace path (this is not a + Cloud Datastore namespace). For more information about Cloud + Storage namespace paths, see `Object name considerations + `__. The resulting files will be nested deeper + than the specified URL prefix. The final output URL will be + provided in the [google.datastore.admin.v1.ExportEntitiesRespo + nse.output_url][google.datastore.admin.v1.ExportEntitiesRespon + se.output_url] field. That value should be used for subsequent + ImportEntities operations. By nesting the data files deeper, + the same Cloud Storage bucket can be used in multiple + ExportEntities operations without conflict. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesRequest) + }, +) +_sym_db.RegisterMessage(ExportEntitiesRequest) +_sym_db.RegisterMessage(ExportEntitiesRequest.LabelsEntry) + +ImportEntitiesRequest = _reflection.GeneratedProtocolMessageType( + "ImportEntitiesRequest", + (_message.Message,), + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + { + "DESCRIPTOR": _IMPORTENTITIESREQUEST_LABELSENTRY, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry) + }, + ), + "DESCRIPTOR": _IMPORTENTITIESREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntiti + es][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. + + Attributes: + project_id: + Required. Project ID against which to make the request. + labels: + Client-assigned labels. + input_url: + Required. The full resource URL of the external storage + location. Currently, only Google Cloud Storage is supported. + So input_url should be of the form: ``gs://BUCKET_NAME[/NAMESP + ACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, where + ``BUCKET_NAME`` is the name of the Cloud Storage bucket, + ``NAMESPACE_PATH`` is an optional Cloud Storage namespace path + (this is not a Cloud Datastore namespace), and + ``OVERALL_EXPORT_METADATA_FILE`` is the metadata file written + by the ExportEntities operation. For more information about + Cloud Storage namespace paths, see `Object name considerations + `__. For more information, see [google.datasto + re.admin.v1.ExportEntitiesResponse.output_url][google.datastor + e.admin.v1.ExportEntitiesResponse.output_url]. + entity_filter: + Optionally specify which kinds/namespaces are to be imported. + If provided, the list must be a subset of the EntityFilter + used in creating the export, otherwise a FAILED_PRECONDITION + error will be returned. If no filter is specified then all + entities from the export are imported. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesRequest) + }, +) +_sym_db.RegisterMessage(ImportEntitiesRequest) +_sym_db.RegisterMessage(ImportEntitiesRequest.LabelsEntry) + +ExportEntitiesResponse = _reflection.GeneratedProtocolMessageType( + "ExportEntitiesResponse", + (_message.Message,), + { + "DESCRIPTOR": _EXPORTENTITIESRESPONSE, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The response for [google.datastore.admin.v1.DatastoreAdmin.ExportEntit + ies][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + Attributes: + output_url: + Location of the output metadata file. This can be used to + begin an import into Cloud Datastore (this project or another + project). See [google.datastore.admin.v1.ImportEntitiesRequest + .input_url][google.datastore.admin.v1.ImportEntitiesRequest.in + put_url]. Only present if the operation completed + successfully. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesResponse) + }, +) +_sym_db.RegisterMessage(ExportEntitiesResponse) + +ExportEntitiesMetadata = _reflection.GeneratedProtocolMessageType( + "ExportEntitiesMetadata", + (_message.Message,), + { + "DESCRIPTOR": _EXPORTENTITIESMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata for ExportEntities operations. + + Attributes: + common: + Metadata common to all Datastore Admin operations. + progress_entities: + An estimate of the number of entities processed. + progress_bytes: + An estimate of the number of bytes processed. + entity_filter: + Description of which entities are being exported. + output_url_prefix: + Location for the export metadata and data files. This will be + the same value as the [google.datastore.admin.v1.ExportEntitie + sRequest.output_url_prefix][google.datastore.admin.v1.ExportEn + titiesRequest.output_url_prefix] field. The final output + location is provided in [google.datastore.admin.v1.ExportEntit + iesResponse.output_url][google.datastore.admin.v1.ExportEntiti + esResponse.output_url]. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesMetadata) + }, +) +_sym_db.RegisterMessage(ExportEntitiesMetadata) + +ImportEntitiesMetadata = _reflection.GeneratedProtocolMessageType( + "ImportEntitiesMetadata", + (_message.Message,), + { + "DESCRIPTOR": _IMPORTENTITIESMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata for ImportEntities operations. + + Attributes: + common: + Metadata common to all Datastore Admin operations. + progress_entities: + An estimate of the number of entities processed. + progress_bytes: + An estimate of the number of bytes processed. + entity_filter: + Description of which entities are being imported. + input_url: + The location of the import metadata file. This will be the + same value as the [google.datastore.admin.v1.ExportEntitiesRes + ponse.output_url][google.datastore.admin.v1.ExportEntitiesResp + onse.output_url] field. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesMetadata) + }, +) +_sym_db.RegisterMessage(ImportEntitiesMetadata) + +EntityFilter = _reflection.GeneratedProtocolMessageType( + "EntityFilter", + (_message.Message,), + { + "DESCRIPTOR": _ENTITYFILTER, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Identifies a subset of entities in a project. This is specified as + combinations of kinds and namespaces (either or both of which may be + all, as described in the following examples). Example usage: Entire + project: kinds=[], namespace_ids=[] Kinds Foo and Bar in all + namespaces: kinds=[‘Foo’, ‘Bar’], namespace_ids=[] Kinds Foo and Bar + only in the default namespace: kinds=[‘Foo’, ‘Bar’], + namespace_ids=[’’] Kinds Foo and Bar in both the default and Baz + namespaces: kinds=[‘Foo’, ‘Bar’], namespace_ids=[’‘, ’Baz’] The + entire Baz namespace: kinds=[], namespace_ids=[‘Baz’] + + Attributes: + kinds: + If empty, then this represents all kinds. + namespace_ids: + An empty list represents all namespaces. This is the preferred + usage for projects that don’t use namespaces. An empty string + element represents the default namespace. This should be used + if the project has data in non-default namespaces, but doesn’t + want to include them. Each namespace in this list must be + unique. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.EntityFilter) + }, +) +_sym_db.RegisterMessage(EntityFilter) + +GetIndexRequest = _reflection.GeneratedProtocolMessageType( + "GetIndexRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETINDEXREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][go + ogle.datastore.admin.v1.DatastoreAdmin.GetIndex]. + + Attributes: + project_id: + Project ID against which to make the request. + index_id: + The resource ID of the index to get. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.GetIndexRequest) + }, +) +_sym_db.RegisterMessage(GetIndexRequest) + +ListIndexesRequest = _reflection.GeneratedProtocolMessageType( + "ListIndexesRequest", + (_message.Message,), + { + "DESCRIPTOR": _LISTINDEXESREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes] + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Attributes: + project_id: + Project ID against which to make the request. + page_size: + The maximum number of items to return. If zero, then all + results will be returned. + page_token: + The next_page_token value returned from a previous List + request, if any. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ListIndexesRequest) + }, +) +_sym_db.RegisterMessage(ListIndexesRequest) + +ListIndexesResponse = _reflection.GeneratedProtocolMessageType( + "ListIndexesResponse", + (_message.Message,), + { + "DESCRIPTOR": _LISTINDEXESRESPONSE, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The response for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes + ][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Attributes: + indexes: + The indexes. + next_page_token: + The standard List next-page token. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ListIndexesResponse) + }, +) +_sym_db.RegisterMessage(ListIndexesResponse) + +IndexOperationMetadata = _reflection.GeneratedProtocolMessageType( + "IndexOperationMetadata", + (_message.Message,), + { + "DESCRIPTOR": _INDEXOPERATIONMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata for Index operations. + + Attributes: + common: + Metadata common to all Datastore Admin operations. + progress_entities: + An estimate of the number of entities processed. + index_id: + The index resource ID that this operation is acting on. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.IndexOperationMetadata) + }, +) +_sym_db.RegisterMessage(IndexOperationMetadata) + + +DESCRIPTOR._options = None +_COMMONMETADATA_LABELSENTRY._options = None +_EXPORTENTITIESREQUEST_LABELSENTRY._options = None +_EXPORTENTITIESREQUEST.fields_by_name["project_id"]._options = None +_EXPORTENTITIESREQUEST.fields_by_name["output_url_prefix"]._options = None +_IMPORTENTITIESREQUEST_LABELSENTRY._options = None +_IMPORTENTITIESREQUEST.fields_by_name["project_id"]._options = None +_IMPORTENTITIESREQUEST.fields_by_name["input_url"]._options = None + +_DATASTOREADMIN = _descriptor.ServiceDescriptor( + name="DatastoreAdmin", + full_name="google.datastore.admin.v1.DatastoreAdmin", + file=DESCRIPTOR, + index=0, + serialized_options=b"\312A\030datastore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore", + create_key=_descriptor._internal_create_key, + serialized_start=2643, + serialized_end=3567, + methods=[ + _descriptor.MethodDescriptor( + name="ExportEntities", + full_name="google.datastore.admin.v1.DatastoreAdmin.ExportEntities", + index=0, + containing_service=None, + input_type=_EXPORTENTITIESREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=b'\202\323\344\223\002%" /v1/projects/{project_id}:export:\001*\332A1project_id,labels,entity_filter,output_url_prefix\312A0\n\026ExportEntitiesResponse\022\026ExportEntitiesMetadata', + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="ImportEntities", + full_name="google.datastore.admin.v1.DatastoreAdmin.ImportEntities", + index=1, + containing_service=None, + input_type=_IMPORTENTITIESREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=b'\202\323\344\223\002%" /v1/projects/{project_id}:import:\001*\332A)project_id,labels,input_url,entity_filter\312A/\n\025google.protobuf.Empty\022\026ImportEntitiesMetadata', + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="GetIndex", + full_name="google.datastore.admin.v1.DatastoreAdmin.GetIndex", + index=2, + containing_service=None, + input_type=_GETINDEXREQUEST, + output_type=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2._INDEX, + serialized_options=b"\202\323\344\223\002.\022,/v1/projects/{project_id}/indexes/{index_id}", + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="ListIndexes", + full_name="google.datastore.admin.v1.DatastoreAdmin.ListIndexes", + index=3, + containing_service=None, + input_type=_LISTINDEXESREQUEST, + output_type=_LISTINDEXESRESPONSE, + serialized_options=b"\202\323\344\223\002#\022!/v1/projects/{project_id}/indexes", + create_key=_descriptor._internal_create_key, + ), + ], +) +_sym_db.RegisterServiceDescriptor(_DATASTOREADMIN) + +DESCRIPTOR.services_by_name["DatastoreAdmin"] = _DATASTOREADMIN + +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py new file mode 100644 index 000000000000..177889e13306 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py @@ -0,0 +1,414 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
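+# Editor's note (illustrative sketch, not part of the generated output): a
+# minimal way to exercise the DatastoreAdminStub defined below, assuming a
+# hypothetical project "my-project" and bucket "my-bucket"; real calls to the
+# hosted service also need call credentials (e.g. from google-auth), which
+# are omitted here for brevity:
+#
+#   import grpc
+#   from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2
+#
+#   channel = grpc.secure_channel(
+#       "datastore.googleapis.com:443", grpc.ssl_channel_credentials()
+#   )
+#   stub = DatastoreAdminStub(channel)
+#   operation = stub.ExportEntities(
+#       datastore_admin_pb2.ExportEntitiesRequest(
+#           project_id="my-project", output_url_prefix="gs://my-bucket"
+#       )
+#   )  # returns a google.longrunning Operation message to poll for completion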
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from google.cloud.datastore_admin_v1.proto import ( + datastore_admin_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2, +) +from google.cloud.datastore_admin_v1.proto import ( + index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class DatastoreAdminStub(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ExportEntities = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ImportEntities = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetIndex = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, + ) + self.ListIndexes = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, + ) + + +class DatastoreAdminServicer(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. 
+ + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + def ExportEntities(self, request, context): + """Exports a copy of all or a subset of entities from Google Cloud Datastore + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ImportEntities(self, request, context): + """Imports entities into Google Cloud Datastore. Existing entities with the + same key are overwritten. The import occurs in the background and its + progress can be monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it is possible + that a subset of the data has already been imported to Cloud Datastore. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetIndex(self, request, context): + """Gets an index. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListIndexes(self, request, context): + """Lists the indexes that match the specified filters. Datastore uses an + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_DatastoreAdminServicer_to_server(servicer, server): + rpc_method_handlers = { + "ExportEntities": grpc.unary_unary_rpc_method_handler( + servicer.ExportEntities, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ImportEntities": grpc.unary_unary_rpc_method_handler( + servicer.ImportEntities, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetIndex": grpc.unary_unary_rpc_method_handler( + servicer.GetIndex, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.SerializeToString, + ), + "ListIndexes": grpc.unary_unary_rpc_method_handler( + servicer.ListIndexes, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.datastore.admin.v1.DatastoreAdmin", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. +class DatastoreAdmin(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. 
+ + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + @staticmethod + def ExportEntities( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ImportEntities( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetIndex( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListIndexes( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto new file mode 100644 index 
000000000000..96c2278b3b63
--- /dev/null
+++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto
@@ -0,0 +1,115 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.datastore.admin.v1;
+
+import "google/api/field_behavior.proto";
+import "google/api/annotations.proto";
+
+option csharp_namespace = "Google.Cloud.Datastore.Admin.V1";
+option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin";
+option java_multiple_files = true;
+option java_outer_classname = "IndexProto";
+option java_package = "com.google.datastore.admin.v1";
+option ruby_package = "Google::Cloud::Datastore::Admin::V1";
+
+// A minimal index definition.
+message Index {
+  // A property of an index.
+  message IndexedProperty {
+    // Required. The property name to index.
+    string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+    // Required. The indexed property's direction. Must not be DIRECTION_UNSPECIFIED.
+    Direction direction = 2 [(google.api.field_behavior) = REQUIRED];
+  }
+
+  // For an ordered index, specifies whether each of the entity's ancestors
+  // will be included.
+  enum AncestorMode {
+    // The ancestor mode is unspecified.
+    ANCESTOR_MODE_UNSPECIFIED = 0;
+
+    // Do not include the entity's ancestors in the index.
+    NONE = 1;
+
+    // Include all the entity's ancestors in the index.
+    ALL_ANCESTORS = 2;
+  }
+
+  // The direction determines how a property is indexed.
+  enum Direction {
+    // The direction is unspecified.
+    DIRECTION_UNSPECIFIED = 0;
+
+    // The property's values are indexed so as to support sequencing in
+    // ascending order and also query by <, >, <=, >=, and =.
+    ASCENDING = 1;
+
+    // The property's values are indexed so as to support sequencing in
+    // descending order and also query by <, >, <=, >=, and =.
+    DESCENDING = 2;
+  }
+
+  // The possible set of states of an index.
+  enum State {
+    // The state is unspecified.
+    STATE_UNSPECIFIED = 0;
+
+    // The index is being created, and cannot be used by queries.
+    // There is an active long-running operation for the index.
+    // The index is updated when writing an entity.
+    // Some index data may exist.
+    CREATING = 1;
+
+    // The index is ready to be used.
+    // The index is updated when writing an entity.
+    // The index is fully populated from all stored entities it applies to.
+    READY = 2;
+
+    // The index is being deleted, and cannot be used by queries.
+    // There is an active long-running operation for the index.
+    // The index is not updated when writing an entity.
+    // Some index data may exist.
+    DELETING = 3;
+
+    // The index was being created or deleted, but something went wrong.
+    // The index cannot be used by queries.
+    // There is no active long-running operation for the index,
+    // and the most recently finished long-running operation failed.
+    // The index is not updated when writing an entity.
+    // Some index data may exist.
+    ERROR = 4;
+  }
+
+  // Output only. Project ID.
+ string project_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The resource ID of the index. + string index_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Required. The entity kind to which this index applies. + string kind = 4 [(google.api.field_behavior) = REQUIRED]; + + // Required. The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. + AncestorMode ancestor = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. An ordered sequence of property names and their index attributes. + repeated IndexedProperty properties = 6 [(google.api.field_behavior) = REQUIRED]; + + // Output only. The state of the index. + State state = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py new file mode 100644 index 000000000000..c1ccb034c511 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py @@ -0,0 +1,430 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/datastore_admin_v1/proto/index.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/datastore_admin_v1/proto/index.proto", + package="google.datastore.admin.v1", + syntax="proto3", + serialized_options=b"\n\035com.google.datastore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\252\002\037Google.Cloud.Datastore.Admin.V1\352\002#Google::Cloud::Datastore::Admin::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n1google/cloud/datastore_admin_v1/proto/index.proto\x12\x19google.datastore.admin.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe6\x04\n\x05Index\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x15\n\x08index_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04kind\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x44\n\x08\x61ncestor\x18\x05 \x01(\x0e\x32-.google.datastore.admin.v1.Index.AncestorModeB\x03\xe0\x41\x02\x12I\n\nproperties\x18\x06 \x03(\x0b\x32\x30.google.datastore.admin.v1.Index.IndexedPropertyB\x03\xe0\x41\x02\x12:\n\x05state\x18\x07 \x01(\x0e\x32&.google.datastore.admin.v1.Index.StateB\x03\xe0\x41\x03\x1ah\n\x0fIndexedProperty\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x42\n\tdirection\x18\x02 
\x01(\x0e\x32*.google.datastore.admin.v1.Index.DirectionB\x03\xe0\x41\x02"J\n\x0c\x41ncestorMode\x12\x1d\n\x19\x41NCESTOR_MODE_UNSPECIFIED\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x11\n\rALL_ANCESTORS\x10\x02"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"P\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08\x44\x45LETING\x10\x03\x12\t\n\x05\x45RROR\x10\x04\x42\xb5\x01\n\x1d\x63om.google.datastore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\xaa\x02\x1fGoogle.Cloud.Datastore.Admin.V1\xea\x02#Google::Cloud::Datastore::Admin::V1b\x06proto3', + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], +) + + +_INDEX_ANCESTORMODE = _descriptor.EnumDescriptor( + name="AncestorMode", + full_name="google.datastore.admin.v1.Index.AncestorMode", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="ANCESTOR_MODE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="NONE", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="ALL_ANCESTORS", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=531, + serialized_end=605, +) +_sym_db.RegisterEnumDescriptor(_INDEX_ANCESTORMODE) + +_INDEX_DIRECTION = _descriptor.EnumDescriptor( + name="Direction", + full_name="google.datastore.admin.v1.Index.Direction", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="DIRECTION_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="ASCENDING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DESCENDING", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=607, + serialized_end=676, +) +_sym_db.RegisterEnumDescriptor(_INDEX_DIRECTION) + +_INDEX_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.datastore.admin.v1.Index.State", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CREATING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="READY", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DELETING", + index=3, + number=3, + serialized_options=None, + type=None, + 
create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="ERROR", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=678, + serialized_end=758, +) +_sym_db.RegisterEnumDescriptor(_INDEX_STATE) + + +_INDEX_INDEXEDPROPERTY = _descriptor.Descriptor( + name="IndexedProperty", + full_name="google.datastore.admin.v1.Index.IndexedProperty", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.datastore.admin.v1.Index.IndexedProperty.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="direction", + full_name="google.datastore.admin.v1.Index.IndexedProperty.direction", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=425, + serialized_end=529, +) + +_INDEX = _descriptor.Descriptor( + name="Index", + full_name="google.datastore.admin.v1.Index", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.Index.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\003", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="index_id", + full_name="google.datastore.admin.v1.Index.index_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\003", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="kind", + full_name="google.datastore.admin.v1.Index.kind", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="ancestor", + full_name="google.datastore.admin.v1.Index.ancestor", + index=3, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="properties", + full_name="google.datastore.admin.v1.Index.properties", + index=4, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.datastore.admin.v1.Index.state", + index=5, + number=7, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\003", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_INDEX_INDEXEDPROPERTY,], + enum_types=[_INDEX_ANCESTORMODE, _INDEX_DIRECTION, _INDEX_STATE,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=144, + serialized_end=758, +) + +_INDEX_INDEXEDPROPERTY.fields_by_name["direction"].enum_type = _INDEX_DIRECTION +_INDEX_INDEXEDPROPERTY.containing_type = _INDEX +_INDEX.fields_by_name["ancestor"].enum_type = _INDEX_ANCESTORMODE +_INDEX.fields_by_name["properties"].message_type = _INDEX_INDEXEDPROPERTY +_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE +_INDEX_ANCESTORMODE.containing_type = _INDEX +_INDEX_DIRECTION.containing_type = _INDEX +_INDEX_STATE.containing_type = _INDEX +DESCRIPTOR.message_types_by_name["Index"] = _INDEX +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Index = _reflection.GeneratedProtocolMessageType( + "Index", + (_message.Message,), + { + "IndexedProperty": _reflection.GeneratedProtocolMessageType( + "IndexedProperty", + (_message.Message,), + { + "DESCRIPTOR": _INDEX_INDEXEDPROPERTY, + "__module__": "google.cloud.datastore_admin_v1.proto.index_pb2", + "__doc__": """A property of an index. + + Attributes: + name: + Required. The property name to index. + direction: + Required. The indexed property’s direction. Must not be + DIRECTION_UNSPECIFIED. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Index.IndexedProperty) + }, + ), + "DESCRIPTOR": _INDEX, + "__module__": "google.cloud.datastore_admin_v1.proto.index_pb2", + "__doc__": """A minimal index definition. + + Attributes: + project_id: + Output only. Project ID. + index_id: + Output only. The resource ID of the index. + kind: + Required. The entity kind to which this index applies. + ancestor: + Required. The index’s ancestor mode. Must not be + ANCESTOR_MODE_UNSPECIFIED. + properties: + Required. An ordered sequence of property names and their + index attributes. + state: + Output only. The state of the index. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Index) + }, +) +_sym_db.RegisterMessage(Index) +_sym_db.RegisterMessage(Index.IndexedProperty) + + +DESCRIPTOR._options = None +_INDEX_INDEXEDPROPERTY.fields_by_name["name"]._options = None +_INDEX_INDEXEDPROPERTY.fields_by_name["direction"]._options = None +_INDEX.fields_by_name["project_id"]._options = None +_INDEX.fields_by_name["index_id"]._options = None +_INDEX.fields_by_name["kind"]._options = None +_INDEX.fields_by_name["ancestor"]._options = None +_INDEX.fields_by_name["properties"]._options = None +_INDEX.fields_by_name["state"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py new file mode 100644 index 000000000000..8a9393943bdf --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 12958c41a487..5f9b530fadb7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -39,7 +39,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datastore" + "google-cloud-datastore", ).version @@ -167,12 +167,12 @@ def __init__( self.transport = transport else: self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -183,7 +183,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -219,8 +219,8 @@ def lookup( >>> response = client.lookup(project_id, keys) Args: - project_id (str): The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Keys of entities to look up. + project_id (str): Required. The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. Keys of entities to look up. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Key` @@ -259,7 +259,7 @@ def lookup( ) request = datastore_pb2.LookupRequest( - project_id=project_id, keys=keys, read_options=read_options + project_id=project_id, keys=keys, read_options=read_options, ) if metadata is None: metadata = [] @@ -281,7 +281,7 @@ def lookup( def run_query( self, project_id, - partition_id, + partition_id=None, read_options=None, query=None, gql_query=None, @@ -300,13 +300,10 @@ def run_query( >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize `partition_id`: - >>> partition_id = {} - >>> - >>> response = client.run_query(project_id, partition_id) + >>> response = client.run_query(project_id) Args: - project_id (str): The ID of the project against which to make the request. + project_id (str): Required. The ID of the project against which to make the request. partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into subsets, identified by a partition ID. Queries are scoped to a single partition. This partition ID is normalized with the standard default context @@ -358,7 +355,9 @@ def run_query( # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query) + google.api_core.protobuf_helpers.check_oneof( + query=query, gql_query=gql_query, + ) request = datastore_pb2.RunQueryRequest( project_id=project_id, @@ -384,6 +383,90 @@ def run_query( request, retry=retry, timeout=timeout, metadata=metadata ) + def reserve_ids( + self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `keys`: + >>> keys = [] + >>> + >>> response = client.reserve_ids(project_id, keys) + + Args: + project_id (str): Required. The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. A list of keys with complete key paths whose numeric IDs should not be + auto-allocated. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + database_id (str): If not empty, the ID of the database against which to make the request. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + # Wrap the transport method to add retry and timeout logic. + if "reserve_ids" not in self._inner_api_calls: + self._inner_api_calls[ + "reserve_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.reserve_ids, + default_retry=self._method_configs["ReserveIds"].retry, + default_timeout=self._method_configs["ReserveIds"].timeout, + client_info=self._client_info, + ) + + request = datastore_pb2.ReserveIdsRequest( + project_id=project_id, keys=keys, database_id=database_id, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["reserve_ids"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def begin_transaction( self, project_id, @@ -406,7 +489,7 @@ def begin_transaction( >>> response = client.begin_transaction(project_id) Args: - project_id (str): The ID of the project against which to make the request. + project_id (str): Required. The ID of the project against which to make the request. transaction_options (Union[dict, ~google.cloud.datastore_v1.types.TransactionOptions]): Options for a new transaction. If a dict is provided, it must be of the same form as the protobuf @@ -442,7 +525,7 @@ def begin_transaction( ) request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, transaction_options=transaction_options + project_id=project_id, transaction_options=transaction_options, ) if metadata is None: metadata = [] @@ -464,8 +547,8 @@ def begin_transaction( def commit( self, project_id, - mode, - mutations, + mode=None, + mutations=None, transaction=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, @@ -477,24 +560,20 @@ def commit( Example: >>> from google.cloud import datastore_v1 - >>> from google.cloud.datastore_v1 import enums >>> >>> client = datastore_v1.DatastoreClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize `mode`: - >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED - >>> - >>> # TODO: Initialize `mutations`: - >>> mutations = [] - >>> - >>> response = client.commit(project_id, mode, mutations) + >>> response = client.commit(project_id) Args: - project_id (str): The ID of the project against which to make the request. + project_id (str): Required. The ID of the project against which to make the request. mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. + transaction (bytes): The identifier of the transaction associated with the commit. A + transaction identifier is returned by a call to + ``Datastore.BeginTransaction``. mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a single entity are @@ -511,9 +590,6 @@ def commit( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Mutation` - transaction (bytes): The identifier of the transaction associated with the commit. A - transaction identifier is returned by a call to - ``Datastore.BeginTransaction``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -546,13 +622,13 @@ def commit( # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(transaction=transaction) + google.api_core.protobuf_helpers.check_oneof(transaction=transaction,) request = datastore_pb2.CommitRequest( project_id=project_id, mode=mode, - mutations=mutations, transaction=transaction, + mutations=mutations, ) if metadata is None: metadata = [] @@ -596,8 +672,8 @@ def rollback( >>> response = client.rollback(project_id, transaction) Args: - project_id (str): The ID of the project against which to make the request. - transaction (bytes): The transaction identifier, returned by a call to + project_id (str): Required. The ID of the project against which to make the request. + transaction (bytes): Required. The transaction identifier, returned by a call to ``Datastore.BeginTransaction``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -630,7 +706,7 @@ def rollback( ) request = datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction + project_id=project_id, transaction=transaction, ) if metadata is None: metadata = [] @@ -675,8 +751,8 @@ def allocate_ids( >>> response = client.allocate_ids(project_id, keys) Args: - project_id (str): The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with incomplete key paths for which to allocate IDs. + project_id (str): Required. The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. If a dict is provided, it must be of the same form as the protobuf @@ -711,7 +787,7 @@ def allocate_ids( client_info=self._client_info, ) - request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys) + request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys,) if metadata is None: metadata = [] metadata = list(metadata) @@ -728,87 +804,3 @@ def allocate_ids( return self._inner_api_calls["allocate_ids"]( request, retry=retry, timeout=timeout, metadata=metadata ) - - def reserve_ids( - self, - project_id, - keys, - database_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `keys`: - >>> keys = [] - >>> - >>> response = client.reserve_ids(project_id, keys) - - Args: - project_id (str): The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with complete key paths whose numeric IDs should not be - auto-allocated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Key` - database_id (str): If not empty, the ID of the database against which to make the request. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "reserve_ids" not in self._inner_api_calls: - self._inner_api_calls[ - "reserve_ids" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reserve_ids, - default_retry=self._method_configs["ReserveIds"].retry, - default_timeout=self._method_configs["ReserveIds"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, keys=keys, database_id=database_id - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["reserve_ids"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index 95822b8babfc..5346b3ce84a5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -27,6 +27,11 @@ "retry_codes_name": "idempotent", "retry_params_name": "default", }, + "ReserveIds": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, "BeginTransaction": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", @@ -47,11 +52,6 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "ReserveIds": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, }, } } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py index b56d0fd39269..f84538a3c665 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,8 +21,8 @@ class NullValue(enum.IntEnum): """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. + ``NullValue`` is a singleton enumeration to represent the null value + for the ``Value`` type union. The JSON representation for ``NullValue`` is JSON ``null``. 
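
(With the ``ReserveIds`` entry registered under the ``idempotent`` retry profile in the ``datastore_client_config.py`` hunk above, a failed reservation attempt is retried with the default parameters. Below is a hypothetical call using the dict form of ``Key`` that the ``reserve_ids`` docstring allows; the project and key values are placeholders.)

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()
    project_id = "my-project"  # hypothetical

    # A complete key path (kind plus numeric ID); values are illustrative.
    key = {
        "partition_id": {"project_id": project_id},
        "path": [{"kind": "Task", "id": 5730082031140864}],
    }

    # Reserves the ID so Cloud Datastore never auto-allocates it elsewhere.
    response = client.reserve_ids(project_id, [key])
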
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index c7c640c472e1..74552d8a32d6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -56,7 +56,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -74,7 +74,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = {"datastore_stub": datastore_pb2_grpc.DatastoreStub(channel)} + self._stubs = { + "datastore_stub": datastore_pb2_grpc.DatastoreStub(channel), + } @classmethod def create_channel( @@ -134,6 +136,20 @@ def run_query(self): """ return self._stubs["datastore_stub"].RunQuery + @property + def reserve_ids(self): + """Return the gRPC stub for :meth:`DatastoreClient.reserve_ids`. + + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_stub"].ReserveIds + @property def begin_transaction(self): """Return the gRPC stub for :meth:`DatastoreClient.begin_transaction`. @@ -187,17 +203,3 @@ def allocate_ids(self): deserialized response object. """ return self._stubs["datastore_stub"].AllocateIds - - @property - def reserve_ids(self): - """Return the gRPC stub for :meth:`DatastoreClient.reserve_ids`. - - Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].ReserveIds diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto index 51d69acb9db8..ad016194ab2d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -17,6 +17,8 @@ syntax = "proto3"; package google.datastore.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/datastore/v1/entity.proto"; import "google/datastore/v1/query.proto"; @@ -26,6 +28,7 @@ option java_multiple_files = true; option java_outer_classname = "DatastoreProto"; option java_package = "com.google.datastore.v1"; option php_namespace = "Google\\Cloud\\Datastore\\V1"; +option ruby_package = "Google::Cloud::Datastore::V1"; // Each RPC normalizes the partition IDs of the keys in its input entities, // and always returns entities with keys with normalized partition IDs. @@ -35,12 +38,18 @@ option php_namespace = "Google\\Cloud\\Datastore\\V1"; // the request. // service Datastore { + option (google.api.default_host) = "datastore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + // Looks up entities by key. rpc Lookup(LookupRequest) returns (LookupResponse) { option (google.api.http) = { post: "/v1/projects/{project_id}:lookup" body: "*" }; + option (google.api.method_signature) = "project_id,read_options,keys"; } // Queries for entities. @@ -52,12 +61,12 @@ service Datastore { } // Begins a new transaction. - rpc BeginTransaction(BeginTransactionRequest) - returns (BeginTransactionResponse) { + rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { option (google.api.http) = { post: "/v1/projects/{project_id}:beginTransaction" body: "*" }; + option (google.api.method_signature) = "project_id"; } // Commits a transaction, optionally creating, deleting or modifying some @@ -67,6 +76,8 @@ service Datastore { post: "/v1/projects/{project_id}:commit" body: "*" }; + option (google.api.method_signature) = "project_id,mode,transaction,mutations"; + option (google.api.method_signature) = "project_id,mode,mutations"; } // Rolls back a transaction. @@ -75,6 +86,7 @@ service Datastore { post: "/v1/projects/{project_id}:rollback" body: "*" }; + option (google.api.method_signature) = "project_id,transaction"; } // Allocates IDs for the given keys, which is useful for referencing an entity @@ -84,6 +96,7 @@ service Datastore { post: "/v1/projects/{project_id}:allocateIds" body: "*" }; + option (google.api.method_signature) = "project_id,keys"; } // Prevents the supplied keys' IDs from being auto-allocated by Cloud @@ -93,19 +106,20 @@ service Datastore { post: "/v1/projects/{project_id}:reserveIds" body: "*" }; + option (google.api.method_signature) = "project_id,keys"; } } // The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. message LookupRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // The options for this lookup request. ReadOptions read_options = 1; - // Keys of entities to look up. - repeated Key keys = 3; + // Required. Keys of entities to look up. + repeated Key keys = 3 [(google.api.field_behavior) = REQUIRED]; } // The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. @@ -128,8 +142,8 @@ message LookupResponse { // The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. message RunQueryRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. 
The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // Entities are partitioned into subsets, identified by a partition ID. // Queries are scoped to a single partition. @@ -150,8 +164,7 @@ message RunQueryRequest { } } -// The response for -// [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. message RunQueryResponse { // A batch of query results (always present). QueryResultBatch batch = 1; @@ -160,18 +173,16 @@ message RunQueryResponse { Query query = 2; } -// The request for -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. message BeginTransactionRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // Options for a new transaction. TransactionOptions transaction_options = 10; } -// The response for -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. message BeginTransactionResponse { // The transaction identifier (always present). bytes transaction = 1; @@ -179,18 +190,19 @@ message BeginTransactionResponse { // The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. message RollbackRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - // The transaction identifier, returned by a call to + // Required. The transaction identifier, returned by a call to // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; + bytes transaction = 1 [(google.api.field_behavior) = REQUIRED]; } -// The response for -// [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an empty -// message). -message RollbackResponse {} +// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +// (an empty message). +message RollbackResponse { + +} // The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. message CommitRequest { @@ -208,8 +220,8 @@ message CommitRequest { NON_TRANSACTIONAL = 2; } - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // The type of commit to perform. Defaults to `TRANSACTIONAL`. Mode mode = 5; @@ -249,42 +261,40 @@ message CommitResponse { int32 index_updates = 4; } -// The request for -// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. message AllocateIdsRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - // A list of keys with incomplete key paths for which to allocate IDs. + // Required. 
A list of keys with incomplete key paths for which to allocate IDs. // No key may be reserved/read-only. - repeated Key keys = 1; + repeated Key keys = 1 [(google.api.field_behavior) = REQUIRED]; } -// The response for -// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. message AllocateIdsResponse { // The keys specified in the request (in the same order), each with // its key path completed with a newly allocated ID. repeated Key keys = 1; } -// The request for -// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +// The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. message ReserveIdsRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // If not empty, the ID of the database against which to make the request. string database_id = 9; - // A list of keys with complete key paths whose numeric IDs should not be + // Required. A list of keys with complete key paths whose numeric IDs should not be // auto-allocated. - repeated Key keys = 1; + repeated Key keys = 1 [(google.api.field_behavior) = REQUIRED]; } -// The response for -// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. -message ReserveIdsResponse {} +// The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +message ReserveIdsResponse { + +} // A mutation to apply to an entity. message Mutation { @@ -374,10 +384,8 @@ message ReadOptions { // Options for beginning a new transaction. // // Transactions can be created explicitly with calls to -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] -// or implicitly by setting -// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] -// in read requests. +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] or implicitly by setting +// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] in read requests. message TransactionOptions { // Options specific to read / write transactions. message ReadWrite { @@ -386,7 +394,9 @@ message TransactionOptions { } // Options specific to read-only transactions. - message ReadOnly {} + message ReadOnly { + + } // The `mode` of the transaction, indicating whether write operations are // supported. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index c62dea6375e0..cf7a3cfd1168 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -1,10 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/datastore_v1/proto/datastore.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -16,6 +13,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.datastore_v1.proto import ( entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2, ) @@ -28,14 +27,13 @@ name="google/cloud/datastore_v1/proto/datastore.proto", package="google.datastore.v1", syntax="proto3", - serialized_options=_b( - "\n\027com.google.datastore.v1B\016DatastoreProtoP\001Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 
\x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\xbc\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z= 0 if specified. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) - ), + }, ) _sym_db.RegisterMessage(Query) KindExpression = _reflection.GeneratedProtocolMessageType( "KindExpression", (_message.Message,), - dict( - DESCRIPTOR=_KINDEXPRESSION, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A representation of a kind. - + { + "DESCRIPTOR": _KINDEXPRESSION, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A representation of a kind. Attributes: name: The name of the kind. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) - ), + }, ) _sym_db.RegisterMessage(KindExpression) PropertyReference = _reflection.GeneratedProtocolMessageType( "PropertyReference", (_message.Message,), - dict( - DESCRIPTOR=_PROPERTYREFERENCE, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A reference to a property relative to the kind expressions. - + { + "DESCRIPTOR": _PROPERTYREFERENCE, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A reference to a property relative to the kind expressions. Attributes: name: - The name of the property. If name includes "."s, it may be + The name of the property. If name includes “.”s, it may be interpreted as a property name path. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) - ), + }, ) _sym_db.RegisterMessage(PropertyReference) Projection = _reflection.GeneratedProtocolMessageType( "Projection", (_message.Message,), - dict( - DESCRIPTOR=_PROJECTION, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A representation of a property in a projection. - + { + "DESCRIPTOR": _PROJECTION, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A representation of a property in a projection. Attributes: property: The property to project. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) - ), + }, ) _sym_db.RegisterMessage(Projection) PropertyOrder = _reflection.GeneratedProtocolMessageType( "PropertyOrder", (_message.Message,), - dict( - DESCRIPTOR=_PROPERTYORDER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""The desired order for a specific property. - + { + "DESCRIPTOR": _PROPERTYORDER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """The desired order for a specific property. Attributes: property: @@ -1437,18 +1549,17 @@ The direction to order by. Defaults to ``ASCENDING``. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) - ), + }, ) _sym_db.RegisterMessage(PropertyOrder) Filter = _reflection.GeneratedProtocolMessageType( "Filter", (_message.Message,), - dict( - DESCRIPTOR=_FILTER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A holder for any type of filter. - + { + "DESCRIPTOR": _FILTER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A holder for any type of filter. Attributes: filter_type: @@ -1459,18 +1570,17 @@ A filter on a property. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) - ), + }, ) _sym_db.RegisterMessage(Filter) CompositeFilter = _reflection.GeneratedProtocolMessageType( "CompositeFilter", (_message.Message,), - dict( - DESCRIPTOR=_COMPOSITEFILTER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A filter that merges multiple other filters using the given operator. - + { + "DESCRIPTOR": _COMPOSITEFILTER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A filter that merges multiple other filters using the given operator. Attributes: op: @@ -1480,18 +1590,17 @@ filter. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) - ), + }, ) _sym_db.RegisterMessage(CompositeFilter) PropertyFilter = _reflection.GeneratedProtocolMessageType( "PropertyFilter", (_message.Message,), - dict( - DESCRIPTOR=_PROPERTYFILTER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A filter on a specific property. - + { + "DESCRIPTOR": _PROPERTYFILTER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A filter on a specific property. Attributes: property: @@ -1502,28 +1611,27 @@ The value to compare the property to. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) - ), + }, ) _sym_db.RegisterMessage(PropertyFilter) GqlQuery = _reflection.GeneratedProtocolMessageType( "GqlQuery", (_message.Message,), - dict( - NamedBindingsEntry=_reflection.GeneratedProtocolMessageType( + { + "NamedBindingsEntry": _reflection.GeneratedProtocolMessageType( "NamedBindingsEntry", (_message.Message,), - dict( - DESCRIPTOR=_GQLQUERY_NAMEDBINDINGSENTRY, - __module__="google.cloud.datastore_v1.proto.query_pb2" + { + "DESCRIPTOR": _GQLQUERY_NAMEDBINDINGSENTRY, + "__module__": "google.cloud.datastore_v1.proto.query_pb2" # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) - ), + }, ), - DESCRIPTOR=_GQLQUERY, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A `GQL - query `__. - + "DESCRIPTOR": _GQLQUERY, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A `GQL query + `__. Attributes: query_string: @@ -1548,7 +1656,7 @@ The inverse must also be true. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) - ), + }, ) _sym_db.RegisterMessage(GqlQuery) _sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) @@ -1556,11 +1664,10 @@ GqlQueryParameter = _reflection.GeneratedProtocolMessageType( "GqlQueryParameter", (_message.Message,), - dict( - DESCRIPTOR=_GQLQUERYPARAMETER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A binding parameter for a GQL query. - + { + "DESCRIPTOR": _GQLQUERYPARAMETER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A binding parameter for a GQL query. Attributes: parameter_type: @@ -1572,18 +1679,17 @@ batches. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) - ), + }, ) _sym_db.RegisterMessage(GqlQueryParameter) QueryResultBatch = _reflection.GeneratedProtocolMessageType( "QueryResultBatch", (_message.Message,), - dict( - DESCRIPTOR=_QUERYRESULTBATCH, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A batch of results produced by a query. - + { + "DESCRIPTOR": _QUERYRESULTBATCH, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A batch of results produced by a query. Attributes: skipped_results: @@ -1602,17 +1708,17 @@ The state of the query after the current batch. snapshot_version: The version number of the snapshot this batch was returned - from. This applies to the range of results from the query's + from. This applies to the range of results from the query’s ``start_cursor`` (or the beginning of the query if no cursor - was given) to this batch's ``end_cursor`` (not the query's + was given) to this batch’s ``end_cursor`` (not the query’s ``end_cursor``). In a single transaction, subsequent query result batches for the same query can have a greater snapshot - version number. Each batch's snapshot version is valid for all + version number. Each batch’s snapshot version is valid for all preceding batches. The value will be zero for eventually consistent queries. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) - ), + }, ) _sym_db.RegisterMessage(QueryResultBatch) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py index 07cb78fe03a9..8a9393943bdf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py @@ -1,2 +1,3 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" import grpc diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 1e43b20eb9e2..187124ab1294 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -23,14 +23,15 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -38,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -53,10 +56,12 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -84,13 +89,13 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") @@ -110,7 +115,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils") + session.install( + "mock", "pytest", "google-cloud-testutils", + ) session.install("-e", ".") # Run py.test against the system tests. @@ -120,7 +127,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. 
@@ -133,7 +140,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" diff --git a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..ff599eb2af25 --- /dev/null +++ b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..d309d6e97518 --- /dev/null +++ b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
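decrypt-secrets.sh above shells out to gcloud three times, with the target project falling back from SECRET_MANAGER_PROJECT to cloud-devrel-kokoro-resources. An equivalent Python rendering for contexts where a shell script is inconvenient; it reuses the script's secret names and fallback, and the explicit --project flag is an addition here (the script sets PROJECT_ID but never passes it):

import os
import subprocess

PROJECT_ID = os.environ.get(
    "SECRET_MANAGER_PROJECT", "cloud-devrel-kokoro-resources"
)

def fetch_secret(secret_name, destination):
    # Mirrors: gcloud secrets versions access latest --secret=... > file
    payload = subprocess.check_output(
        ["gcloud", "secrets", "versions", "access", "latest",
         "--secret", secret_name, "--project", PROJECT_ID]
    )
    with open(destination, "wb") as fh:
        fh.write(payload)

fetch_secret("python-docs-samples-test-env", "testing/test-env.sh")
fetch_secret("python-docs-samples-service-account", "testing/service-account.json")
fetch_secret("python-docs-samples-client-secrets", "testing/client-secrets.json")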
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. 
+You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..a0406dba8c84 --- /dev/null +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. 
_virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 58b5cedc04b6..865b99bc7c34 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -1,32 +1,25 @@ { "sources": [ - { - "generator": { - "name": "artman", - "version": "2.0.0", - "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" - } - }, { "git": { "name": ".", "remote": "git@github.com:googleapis/python-datastore", - "sha": "f9c0937f24f3a9874db6a8710e260c6ce2907069" + "sha": "f822b98873c829d4ae01d3de1b0d58e0076948fd" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "756b174de4a122461993c1c583345533d819936d", - "internalRef": "308824110" + "sha": "5202cfe3e5c2907a1a21a4c6d4bd0812029b6aa3", + "internalRef": "319247865" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "01b6f23d24b27878b48667ce597876d66b59780e" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } } ], @@ -37,8 +30,16 @@ "apiName": "datastore", "apiVersion": "v1", "language": "python", - "generator": "gapic", - "config": "google/datastore/artman_datastore.yaml" + "generator": "bazel" + } + }, + { + "client": { + "source": "googleapis", + "apiName": "datastore_admin", + "apiVersion": "v1", + "language": "python", + "generator": "bazel" } } ] diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 49e9f69495a4..36b5150af9bc 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -45,10 +45,39 @@ s.move(library / "google/cloud/datastore_admin_v1/proto") s.move(library / "google/cloud/datastore_admin_v1/gapic") +# TODO(busunkim): Remove during the microgenerator transition. +# This re-orders the parameters to avoid breaking existing code. 
+num = s.replace( +"google/**/datastore_client.py", +"""def commit\( +\s+self, +\s+project_id, +\s+mode=None, +\s+transaction=None, +\s+mutations=None, +\s+retry=google\.api_core\.gapic_v1\.method\.DEFAULT, +\s+timeout=google\.api_core\.gapic_v1\.method\.DEFAULT, +\s+metadata=None\):""", +"""def commit( + self, + project_id, + mode=None, + mutations=None, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ):""" +) + +if num != 1: + raise Exception("Required replacement not made.") # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=99) -s.move(templated_files, excludes=["docs/conf.py"]) +s.move(templated_files, excludes=["docs/conf.py", "docs/multiprocessing.rst"]) + +s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-datastore/testing/.gitignore b/packages/google-cloud-datastore/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-datastore/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 5a7448fc8894..6a30089c228f 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -572,7 +572,7 @@ def test_get_multi_hit(self): client._datastore_api_internal = ds_api key = Key(kind, id_, project=self.PROJECT) - result, = client.get_multi([key]) + (result,) = client.get_multi([key]) new_key = result.key # Check the returned value is as expected. @@ -609,7 +609,7 @@ def test_get_multi_hit_w_transaction(self): key = Key(kind, id_, project=self.PROJECT) txn = client.transaction() txn._id = txn_id - result, = client.get_multi([key], transaction=txn) + (result,) = client.get_multi([key], transaction=txn) new_key = result.key # Check the returned value is as expected. diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index a157d50b576a..0478e2cb8a85 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -358,7 +358,7 @@ def test_to_protobuf_defaults(self): self.assertEqual(pb.partition_id.namespace_id, "") # Check the element PB matches the partial key and kind. - elem, = list(pb.path) + (elem,) = list(pb.path) self.assertEqual(elem.kind, _KIND) # Unset values are False-y. 
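The test churn above — `result, =` becoming `(result,) =` — is black 19.10b0 preferring parenthesized single-element unpacking. Both forms are the same assignment, which is why only formatting changed; a tiny runnable check:

# `result, = seq` and `(result,) = seq` are identical one-element
# tuple unpacking; the parentheses only make the trailing comma visible.
items = ["only"]
result, = items
(result2,) = items
assert result == result2 == "only"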
self.assertEqual(elem.name, "") From 4551cc0711e50ff97350377f8d80ba9df9f6d3e1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Jul 2020 13:55:14 -0700 Subject: [PATCH 267/611] chore: release 1.13.0 (#29) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 9 +++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 83593e298d1a..2a3f1faaf5b9 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [1.13.0](https://www.github.com/googleapis/python-datastore/compare/v1.12.0...v1.13.0) (2020-07-01) + + +### Features + +* add datastore admin client ([#39](https://www.github.com/googleapis/python-datastore/issues/39)) ([1963fd8](https://www.github.com/googleapis/python-datastore/commit/1963fd84c012cc7985e44ed0fc03c15a6429833b)) +* add synth config to generate datastore_admin_v1 ([#27](https://www.github.com/googleapis/python-datastore/issues/27)) ([83c636e](https://www.github.com/googleapis/python-datastore/commit/83c636efc6e5bd02bd8dc614e4114f9477c74972)) +* Create CODEOWNERS ([#28](https://www.github.com/googleapis/python-datastore/issues/28)) ([0198419](https://www.github.com/googleapis/python-datastore/commit/0198419a759d4d3932fa92c268772f18aa29e2ca)) + ## [1.12.0](https://www.github.com/googleapis/python-datastore/compare/v1.11.0...v1.12.0) (2020-04-07) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index a20a1dd63005..7f0131cd2c80 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.12.0" +version = "1.13.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 583f0efa34382ded95744136c074a3695abf5af5 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 13 Jul 2020 16:55:47 -0700 Subject: [PATCH 268/611] fix: add missing datastore admin client files (#43) * fix: add missing datastore admin client files * fix: remove commented line --- .../cloud/datastore_admin_v1/__init__.py | 45 +++++++++++++++ .../proto/datastore_admin_pb2.py | 2 +- .../datastore_admin_v1/proto/index_pb2.py | 2 +- .../google/cloud/datastore_admin_v1/types.py | 56 +++++++++++++++++++ .../google-cloud-datastore/synth.metadata | 9 ++- packages/google-cloud-datastore/synth.py | 6 +- 6 files changed, 112 insertions(+), 8 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py new file mode 100644 index 000000000000..a588c3e0fc48 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import +import sys +import warnings + +from google.cloud.datastore_admin_v1 import types +from google.cloud.datastore_admin_v1.gapic import datastore_admin_client +from google.cloud.datastore_admin_v1.gapic import enums + + +if sys.version_info[:2] == (2, 7): + message = ( + "A future version of this library will drop support for Python 2.7. " + "More details about Python 2 support for Google Cloud Client Libraries " + "can be found at https://cloud.google.com/python/docs/python2-sunset/" + ) + warnings.warn(message, DeprecationWarning) + + +class DatastoreAdminClient(datastore_admin_client.DatastoreAdminClient): + __doc__ = datastore_admin_client.DatastoreAdminClient.__doc__ + enums = enums + + +__all__ = ( + "enums", + "types", + "DatastoreAdminClient", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py index f16463bb0475..f7c8d82e40b8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_admin_v1/proto/datastore_admin.proto -"""Generated protocol buffer code.""" + from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py index c1ccb034c511..41f17a3fc811 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_admin_v1/proto/index.proto -"""Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py new file mode 100644 index 000000000000..17ae2d27e8cb --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
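The new datastore_admin_v1/__init__.py above gates a deprecation warning on the interpreter version, so only Python 2.7 users see the sunset notice. The pattern in isolation, runnable anywhere (message shortened for brevity):

import sys
import warnings

if sys.version_info[:2] == (2, 7):
    # Emitted once at import time on Python 2.7 only.
    warnings.warn(
        "A future version of this library will drop support for Python 2.7.",
        DeprecationWarning,
    )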
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2 +from google.cloud.datastore_admin_v1.proto import index_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 + + +_shared_modules = [ + operations_pb2, + any_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + datastore_admin_pb2, + index_pb2, +] + +names = [] + +for module in _shared_modules: # pragma: NO COVER + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) +for module in _local_modules: + for name, message in get_messages(module).items(): + message.__module__ = "google.cloud.datastore_admin_v1.types" + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 865b99bc7c34..133453f2cb5c 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,23 +3,22 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-datastore", - "sha": "f822b98873c829d4ae01d3de1b0d58e0076948fd" + "sha": "0a5e07c0394709a4a4e79d937a281bec824415c3" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5202cfe3e5c2907a1a21a4c6d4bd0812029b6aa3", - "internalRef": "319247865" + "sha": "59f97e6044a1275f83427ab7962a154c00d915b5", + "internalRef": "321058159" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "d82deccf657a66e31bd5da9efdb96c6fa322fc7e" } } ], diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 36b5150af9bc..792c159f0897 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -42,8 +42,12 @@ include_protos=True, ) +s.move( + library / "datastore-admin-v1-py/google/cloud/datastore_admin_v1", + "google/cloud/datastore_admin_v1" +) + s.move(library / "google/cloud/datastore_admin_v1/proto") -s.move(library / "google/cloud/datastore_admin_v1/gapic") # TODO(busunkim): Remove during the microgenerator transition. # This re-orders the parameters to avoid breaking existing code. 
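The types.py module added in this patch lifts every generated message into a single namespace via google.api_core's get_messages helper, so users can write types.Index instead of importing _pb2 modules directly. A reduced sketch of that mechanism, with a stock protobuf module standing in for the datastore_admin protos:

import sys

from google.api_core.protobuf_helpers import get_messages
from google.protobuf import timestamp_pb2

names = []
for module in [timestamp_pb2]:  # stand-in for datastore_admin_pb2, index_pb2
    for name, message in get_messages(module).items():
        # Re-home each generated message class on this module.
        setattr(sys.modules[__name__], name, message)
        names.append(name)

__all__ = tuple(sorted(names))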
From 41133ca162239c07150427f2f715e479e99ceae6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 13 Jul 2020 17:44:59 -0700 Subject: [PATCH 269/611] chore: release 1.13.1 (#44) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 2a3f1faaf5b9..35e17e3b2e7c 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [1.13.1](https://www.github.com/googleapis/python-datastore/compare/v1.13.0...v1.13.1) (2020-07-13) + + +### Bug Fixes + +* add missing datastore admin client files ([#43](https://www.github.com/googleapis/python-datastore/issues/43)) ([0d40f87](https://www.github.com/googleapis/python-datastore/commit/0d40f87eeacd2a256d4b45ccb742599b5df93096)) + ## [1.13.0](https://www.github.com/googleapis/python-datastore/compare/v1.12.0...v1.13.0) (2020-07-01) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 7f0131cd2c80..a67bd83f43a4 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.13.0" +version = "1.13.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 138467a7d99133e3299a0512c0627fb12e4ac73b Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 17 Jul 2020 15:06:57 -0700 Subject: [PATCH 270/611] fix: modify admin pkg name in gapic (#47) --- .../datastore_admin_v1/gapic/datastore_admin_client.py | 2 +- packages/google-cloud-datastore/synth.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py index 9495419eb3ea..62db2b92e60c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py @@ -45,7 +45,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datastore-admin", + "google-cloud-datastore", ).version diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 792c159f0897..f0d11183d8a4 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -49,6 +49,12 @@ s.move(library / "google/cloud/datastore_admin_v1/proto") +s.replace( + "google/**/datastore_admin_client.py", + "google-cloud-datastore-admin", + "google-cloud-datstore" +) + # TODO(busunkim): Remove during the microgenerator transition. # This re-orders the parameters to avoid breaking existing code. 
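synth.py checks the value s.replace returns (the `if num != 1: raise ...` guard earlier in the file) to detect a substitution that silently matched nothing. A stdlib sketch of the same guard, using re.subn's substitution count in place of synthtool. Note that the replacement literal in the hunk above reads "google-cloud-datstore", missing an "a": a count guard only catches a missing match, not a misspelled replacement, so an import-time check is the complementary safeguard (pkg_resources.get_distribution raises DistributionNotFound for an unknown distribution name).

import re

def guarded_replace(text, pattern, repl, expected=1):
    # re.subn reports how many substitutions actually happened,
    # mirroring the count-based guard used in synth.py.
    new_text, num = re.subn(pattern, repl, text)
    if num != expected:
        raise Exception("Required replacement not made.")
    return new_text

print(guarded_replace('python="3.7"', r"3\.7", "3.8"))  # -> python="3.8"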
num = s.replace( From 6f0bbbaba6f811d4a7311c68ea28db73ede99024 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 17 Jul 2020 15:24:40 -0700 Subject: [PATCH 271/611] chore: release 1.13.2 (#48) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 35e17e3b2e7c..adc6c4027614 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [1.13.2](https://www.github.com/googleapis/python-datastore/compare/v1.13.1...v1.13.2) (2020-07-17) + + +### Bug Fixes + +* modify admin pkg name in gapic ([#47](https://www.github.com/googleapis/python-datastore/issues/47)) ([5b5011d](https://www.github.com/googleapis/python-datastore/commit/5b5011daf74133ecdd579bf19bbcf356e6f40dad)) + ### [1.13.1](https://www.github.com/googleapis/python-datastore/compare/v1.13.0...v1.13.1) (2020-07-13) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index a67bd83f43a4..96377b930e81 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.13.1" +version = "1.13.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 606abd82a121868f18dd13065394b34585fe63b1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Aug 2020 00:16:14 -0400 Subject: [PATCH 272/611] tests: fix unit test broken by google-cloud-core release (#54) Closes #53 --- packages/google-cloud-datastore/tests/unit/test_client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 6a30089c228f..f172044e6590 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -155,6 +155,7 @@ def test_constructor_w_implicit_inputs(self): from google.cloud.datastore.client import _CLIENT_INFO from google.cloud.datastore.client import _DATASTORE_BASE_URL + klass = self._get_target_class() other = "other" creds = _make_credentials() @@ -180,7 +181,7 @@ def test_constructor_w_implicit_inputs(self): self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) - default.assert_called_once_with() + default.assert_called_once_with(scopes=klass.SCOPE,) _determine_default_project.assert_called_once_with(None) def test_constructor_w_explicit_inputs(self): From 2983cf439b019e7ce7f3830088f51e38a58c9698 Mon Sep 17 00:00:00 2001 From: Aaron Wilson Date: Wed, 5 Aug 2020 04:26:19 +0000 Subject: [PATCH 273/611] docs: correct semantics of 'complete_key' arg to 'Client.reserve_ids' (#36) Co-authored-by: Tres Seaver --- .../google-cloud-datastore/google/cloud/datastore/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py 
b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 69bbc0342eaf..eba90c5e3414 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -667,7 +667,7 @@ def reserve_ids(self, complete_key, num_ids): """Reserve a list of IDs from a complete key. :type complete_key: :class:`google.cloud.datastore.key.Key` - :param complete_key: Partial key to use as base for reserved IDs. + :param complete_key: Complete key to use as base for reserved IDs. :type num_ids: int :param num_ids: The number of IDs to reserve. From 4af0932d80d8790986064a167b149002d50ff28f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 5 Aug 2020 12:35:00 -0700 Subject: [PATCH 274/611] docs: update docs build (via synth) (#58) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * feat(python-library): changes to docs job * feat(python-library): changes to docs job * migrate to Trampoline V2 * add docs-presubmit job * create docfx yaml files and upload them to another bucket * remove redundant envvars Source-Author: Takashi Matsuo Source-Date: Wed Jul 29 16:15:18 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: f07cb4446192952f19be3056957f56d180586055 Source-Link: https://github.com/googleapis/synthtool/commit/f07cb4446192952f19be3056957f56d180586055 * Revert "feat(python-library): changes to docs job (#700)" This reverts commit f07cb4446192952f19be3056957f56d180586055. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Jul 29 17:33:57 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: ee7506d15daa3873accfff9430eff7e3953f0248 Source-Link: https://github.com/googleapis/synthtool/commit/ee7506d15daa3873accfff9430eff7e3953f0248 * feat(python-library): changes to docs job * feat(python-library): changes to docs job * migrate to Trampoline V2 * add docs-presubmit job * create docfx yaml files and upload them to another bucket * remove redundant envvars * add a failing test first * fix TemplateSyntaxError: Missing end of comment tag * serving_path is not needed any more * use `raw` to make jinja happy Source-Author: Takashi Matsuo Source-Date: Thu Jul 30 12:44:02 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 5dfda5621df45b71b6e88544ebbb53b1a8c90214 Source-Link: https://github.com/googleapis/synthtool/commit/5dfda5621df45b71b6e88544ebbb53b1a8c90214 * fix(python-library): add missing changes Source-Author: Takashi Matsuo Source-Date: Thu Jul 30 18:26:35 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 39b527a39f5cd56d4882b3874fc08eed4756cebe Source-Link: https://github.com/googleapis/synthtool/commit/39b527a39f5cd56d4882b3874fc08eed4756cebe * chore(py_library): add split_system_tests Source-Author: Takashi Matsuo Source-Date: Fri Jul 31 16:17:13 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669 Source-Link: https://github.com/googleapis/synthtool/commit/bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669 * chore: patch docs/conf.py manually * docs: clean up docs/conf.py Co-authored-by: Takashi Matsuo --- packages/google-cloud-datastore/.gitignore | 3 +- .../google-cloud-datastore/.kokoro/build.sh | 8 +- .../.kokoro/docker/docs/Dockerfile | 98 ++++ .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 ++ .../.kokoro/docs/common.cfg | 21 +- .../.kokoro/docs/docs-presubmit.cfg | 17 + 
.../.kokoro/publish-docs.sh | 39 +- .../.kokoro/trampoline_v2.sh | 487 ++++++++++++++++++ packages/google-cloud-datastore/.trampolinerc | 51 ++ packages/google-cloud-datastore/docs/conf.py | 9 +- .../cloud/datastore_v1/proto/datastore_pb2.py | 2 +- .../cloud/datastore_v1/proto/entity_pb2.py | 2 +- .../cloud/datastore_v1/proto/query_pb2.py | 2 +- packages/google-cloud-datastore/noxfile.py | 37 ++ .../google-cloud-datastore/synth.metadata | 5 +- 15 files changed, 798 insertions(+), 28 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile create mode 100755 packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg create mode 100755 packages/google-cloud-datastore/.kokoro/trampoline_v2.sh create mode 100644 packages/google-cloud-datastore/.trampolinerc diff --git a/packages/google-cloud-datastore/.gitignore b/packages/google-cloud-datastore/.gitignore index b87e1ed580d9..b9daa52f118d 100644 --- a/packages/google-cloud-datastore/.gitignore +++ b/packages/google-cloud-datastore/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index 6237d6617c66..df77f9793578 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000000..412b0b56a921 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. 
+RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. + --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 000000000000..d653dd868e4b --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? 
-ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Łukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/packages/google-cloud-datastore/.kokoro/docs/common.cfg b/packages/google-cloud-datastore/.kokoro/docs/common.cfg index 3b561689b108..895e96a69feb 100644 --- a/packages/google-cloud-datastore/.kokoro/docs/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" +build_file: "python-datastore/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000000..1118107829b7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index 487d823ae057..8acb14e802b0 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 -cd github/python-datastore - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000000..719bcd5ba84d --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. 
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+    readonly IO_COLOR_RED="$(tput setaf 1)"
+    readonly IO_COLOR_GREEN="$(tput setaf 2)"
+    readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+    readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+    readonly IO_COLOR_RED=""
+    readonly IO_COLOR_GREEN=""
+    readonly IO_COLOR_YELLOW=""
+    readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+    [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+    local color="$1"
+    shift
+    local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+    echo "================================================================"
+    echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+    echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+    log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+    log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+    log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+    log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+    rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+    # TRAMPOLINE_V2 variables.
+    # Tells scripts whether they are running as part of CI or not.
+    "RUNNING_IN_CI"
+    # Indicates which CI system we're in.
+    "TRAMPOLINE_CI"
+    # Indicates the version of the script.
+    "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI system we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell it which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # Descriptive env var indicating we're on CI.
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="kokoro"
+    if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+        if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+            log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+            exit 1
+        fi
+        # This service account will be activated later.
+        TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+    else
+        if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+            gcloud auth list
+        fi
+        log_yellow "Configuring Container Registry access"
+        gcloud auth configure-docker --quiet
+    fi
+    pass_down_envvars+=(
+        # KOKORO dynamic variables.
+        "KOKORO_BUILD_NUMBER"
+        "KOKORO_BUILD_ID"
+        "KOKORO_JOB_NAME"
+        "KOKORO_GIT_COMMIT"
+        "KOKORO_GITHUB_COMMIT"
+        "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+        "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+        # For Build Cop Bot
+        "KOKORO_GITHUB_COMMIT_URL"
+        "KOKORO_GITHUB_PULL_REQUEST_URL"
+    )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="travis"
+    pass_down_envvars+=(
+        "TRAVIS_BRANCH"
+        "TRAVIS_BUILD_ID"
+        "TRAVIS_BUILD_NUMBER"
+        "TRAVIS_BUILD_WEB_URL"
+        "TRAVIS_COMMIT"
+        "TRAVIS_COMMIT_MESSAGE"
+        "TRAVIS_COMMIT_RANGE"
+        "TRAVIS_JOB_NAME"
+        "TRAVIS_JOB_NUMBER"
+        "TRAVIS_JOB_WEB_URL"
+        "TRAVIS_PULL_REQUEST"
+        "TRAVIS_PULL_REQUEST_BRANCH"
+        "TRAVIS_PULL_REQUEST_SHA"
+        "TRAVIS_PULL_REQUEST_SLUG"
+        "TRAVIS_REPO_SLUG"
+        "TRAVIS_SECURE_ENV_VARS"
+        "TRAVIS_TAG"
+    )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="github-workflow"
+    pass_down_envvars+=(
+        "GITHUB_WORKFLOW"
+        "GITHUB_RUN_ID"
+        "GITHUB_RUN_NUMBER"
+        "GITHUB_ACTION"
+        "GITHUB_ACTIONS"
+        "GITHUB_ACTOR"
+        "GITHUB_REPOSITORY"
+        "GITHUB_EVENT_NAME"
+        "GITHUB_EVENT_PATH"
+        "GITHUB_SHA"
+        "GITHUB_REF"
+        "GITHUB_HEAD_REF"
+        "GITHUB_BASE_REF"
+    )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="circleci"
+    pass_down_envvars+=(
+        "CIRCLE_BRANCH"
+        "CIRCLE_BUILD_NUM"
+        "CIRCLE_BUILD_URL"
+        "CIRCLE_COMPARE_URL"
+        "CIRCLE_JOB"
+        "CIRCLE_NODE_INDEX"
+        "CIRCLE_NODE_TOTAL"
+        "CIRCLE_PREVIOUS_BUILD_NUM"
+        "CIRCLE_PROJECT_REPONAME"
+        "CIRCLE_PROJECT_USERNAME"
+        "CIRCLE_REPOSITORY_URL"
+        "CIRCLE_SHA1"
+        "CIRCLE_STAGE"
+        "CIRCLE_USERNAME"
+        "CIRCLE_WORKFLOW_ID"
+        "CIRCLE_WORKFLOW_JOB_ID"
+        "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+        "CIRCLE_WORKFLOW_WORKSPACE_ID"
+    )
+fi
+
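The CI-detection block above simply probes well-known environment variables: Kokoro first, then analogous branches for Travis, GitHub Actions, and CircleCI. Condensed into a Python sketch (the function name is hypothetical; not part of the patch):

    import os

    def detect_ci():
        # Mirrors the env-var probes in trampoline_v2.sh.
        if os.environ.get("KOKORO_BUILD_ID"):
            return "kokoro"
        if os.environ.get("TRAVIS") == "true":
            return "travis"
        if os.environ.get("GITHUB_RUN_ID"):
            return "github-workflow"
        if os.environ.get("CIRCLECI") == "true":
            return "circleci"
        return None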
+# Find the repository root.
+function repo_root() {
+    local dir="$1"
+    while [[ ! -d "${dir}/.git" ]]; do
+        dir="$(dirname "$dir")"
+    done
+    echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there; otherwise, traverse up from
+# `pwd` to find the `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+    PROGRAM_PATH="$(realpath "$0")"
+    PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+    PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+    PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support a relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable from within `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+    mkdir -p "${tmpdir}/gcloud"
+    gcloud_config_dir="${tmpdir}/gcloud"
+
+    log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+    export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+    log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+    gcloud auth activate-service-account \
+        --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+    log_yellow "Configuring Container Registry access"
+    gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+    # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. 
We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/packages/google-cloud-datastore/.trampolinerc b/packages/google-cloud-datastore/.trampolinerc new file mode 100644 index 000000000000..995ee29111e1 --- /dev/null +++ b/packages/google-cloud-datastore/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index bb3a9142b77c..2e1f5404adb8 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -20,6 +20,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ @@ -39,6 +43,7 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags @@ -50,10 +55,6 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index cf7a3cfd1168..3f5266e2365b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/datastore_v1/proto/datastore.proto -"""Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py index 43321289b17e..4df7bbb66148 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/entity.proto -"""Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py index e3bd1141e7f0..cf915c8445b7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/query.proto -"""Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 187124ab1294..811d4ff80c8e 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -100,6 +100,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. 
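The new `RUN_SYSTEM_TESTS` guard above gives CI jobs such as the docs presubmit a kill switch for the system-test session, independent of the credentials check that follows. The same default-true toggle, isolated as a sketch (the helper name is hypothetical, not part of the patch):

    import os

    def env_flag(name, default="true"):
        # Unset, or any value other than "false", counts as enabled.
        return os.environ.get(name, default) != "false"

    # if not env_flag("RUN_SYSTEM_TESTS"):
    #     session.skip("RUN_SYSTEM_TESTS is set to false, skipping")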
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -160,3 +164,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 133453f2cb5c..c07e27eae1c9 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,7 +3,8 @@ { "git": { "name": ".", - "sha": "0a5e07c0394709a4a4e79d937a281bec824415c3" + "remote": "https://github.com/googleapis/python-datastore.git", + "sha": "50ed94503da244434df0be58098a0ccf2da54b16" } }, { @@ -18,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d82deccf657a66e31bd5da9efdb96c6fa322fc7e" + "sha": "bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669" } } ], From 5f660166c335c14e52c8b15bcf2a7ca3fb71e8a6 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 5 Aug 2020 12:58:04 -0700 Subject: [PATCH 275/611] chore: apply new gapic configuration (from synth) (#57) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/4be218b6-dac7-413a-9b70-c61c140820cb/targets - [ ] To automatically regenerate this PR, check this box. 
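The regenerated client config below renames the retry buckets (`idempotent`/`non_idempotent` become `retry_policy_1_codes`/`no_retry_1_codes`) and tightens the total timeout for retried calls from 600s to 60s. In `google.api_core` terms, `retry_policy_1` corresponds roughly to the following sketch (values converted from milliseconds to seconds; an illustration, not a rendering of the generated client's internals):

    from google.api_core import exceptions, retry

    retry_policy_1 = retry.Retry(
        predicate=retry.if_exception_type(
            exceptions.ServiceUnavailable,  # UNAVAILABLE
            exceptions.DeadlineExceeded,    # DEADLINE_EXCEEDED
        ),
        initial=0.1,     # initial_retry_delay_millis = 100
        multiplier=1.3,  # retry_delay_multiplier
        maximum=60.0,    # max_retry_delay_millis = 60000
        deadline=60.0,   # total_timeout_millis = 60000
    )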
PiperOrigin-RevId: 321580134 Source-Link: https://github.com/googleapis/googleapis/commit/bbe5f05ccb4ccb4a4dae92c558a25c6d3fe55842 --- .../gapic/datastore_client_config.py | 57 ++++++++++++------- .../google-cloud-datastore/synth.metadata | 4 +- 2 files changed, 40 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py index 5346b3ce84a5..b1f7991ef30a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -2,55 +2,74 @@ "interfaces": { "google.datastore.v1.Datastore": { "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], + "retry_policy_1_codes": ["UNAVAILABLE", "DEADLINE_EXCEEDED"], + "no_retry_codes": [], + "no_retry_1_codes": [], }, "retry_params": { - "default": { + "retry_policy_1_params": { "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, "initial_rpc_timeout_millis": 60000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - } + "total_timeout_millis": 60000, + }, + "no_retry_params": { + "initial_retry_delay_millis": 0, + "retry_delay_multiplier": 0.0, + "max_retry_delay_millis": 0, + "initial_rpc_timeout_millis": 0, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 0, + "total_timeout_millis": 0, + }, + "no_retry_1_params": { + "initial_retry_delay_millis": 0, + "retry_delay_multiplier": 0.0, + "max_retry_delay_millis": 0, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 60000, + }, }, "methods": { "Lookup": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", + "retry_codes_name": "retry_policy_1_codes", + "retry_params_name": "retry_policy_1_params", }, "RunQuery": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", + "retry_codes_name": "retry_policy_1_codes", + "retry_params_name": "retry_policy_1_params", }, "ReserveIds": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", + "retry_codes_name": "retry_policy_1_codes", + "retry_params_name": "retry_policy_1_params", }, "BeginTransaction": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_codes_name": "no_retry_1_codes", + "retry_params_name": "no_retry_1_params", }, "Commit": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_codes_name": "no_retry_1_codes", + "retry_params_name": "no_retry_1_params", }, "Rollback": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_codes_name": "no_retry_1_codes", + "retry_params_name": "no_retry_1_params", }, "AllocateIds": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", + "retry_codes_name": "no_retry_1_codes", + "retry_params_name": "no_retry_1_params", }, }, } diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index c07e27eae1c9..ac48db8b5bb6 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ 
b/packages/google-cloud-datastore/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "59f97e6044a1275f83427ab7962a154c00d915b5", - "internalRef": "321058159" + "sha": "bbe5f05ccb4ccb4a4dae92c558a25c6d3fe55842", + "internalRef": "321580134" } }, { From cf25c0411d7a10cf5b3ec98e8f1b6297250c7e51 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Aug 2020 16:52:07 -0400 Subject: [PATCH 276/611] feat: pass 'client_options' to base class ctor (#60) packaging: pin 'google-cloud-core >= 1.4.0' Closes #50. --- .../google-cloud-datastore/google/cloud/datastore/client.py | 5 ++++- packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index eba90c5e3414..7dd286d39fb6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -239,7 +239,10 @@ def __init__( _use_grpc=None, ): super(Client, self).__init__( - project=project, credentials=credentials, _http=_http + project=project, + credentials=credentials, + client_options=client_options, + _http=_http, ) self.namespace = namespace self._client_info = client_info diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 96377b930e81..f19ccbb85954 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.3, < 2.0dev", + "google-cloud-core >= 1.4.0, < 2.0dev", ] extras = {} From d83c7b8e63b5d5eff8e5f403604b84a3f6bfcc6c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Aug 2020 17:05:13 -0400 Subject: [PATCH 277/611] chore: release 1.14.0 (#62) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index adc6c4027614..85ac2961a9a6 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [1.14.0](https://www.github.com/googleapis/python-datastore/compare/v1.13.2...v1.14.0) (2020-08-05) + + +### Features + +* pass 'client_options' to base class ctor ([#60](https://www.github.com/googleapis/python-datastore/issues/60)) ([2575697](https://www.github.com/googleapis/python-datastore/commit/2575697380a2e57b210a37033f2558de582ec10e)), closes [#50](https://www.github.com/googleapis/python-datastore/issues/50) + + +### Documentation + +* correct semantics of 'complete_key' arg to 'Client.reserve_ids' ([#36](https://www.github.com/googleapis/python-datastore/issues/36)) ([50ed945](https://www.github.com/googleapis/python-datastore/commit/50ed94503da244434df0be58098a0ccf2da54b16)) +* update docs build (via synth) ([#58](https://www.github.com/googleapis/python-datastore/issues/58)) 
([5bdacd4](https://www.github.com/googleapis/python-datastore/commit/5bdacd4785f3d433e6e7302fc6839a3c5a3314b4)), closes [#700](https://www.github.com/googleapis/python-datastore/issues/700) + ### [1.13.2](https://www.github.com/googleapis/python-datastore/compare/v1.13.1...v1.13.2) (2020-07-17) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index f19ccbb85954..5396ce7eb0cf 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.13.2" +version = "1.14.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From bbe936fb0d7cbb1e80111385ea9def5bbf2072db Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 11 Aug 2020 17:24:56 -0400 Subject: [PATCH 278/611] docs: add docs for admin client (#63) Closes #49 --- packages/google-cloud-datastore/docs/admin_client.rst | 6 ++++++ packages/google-cloud-datastore/docs/index.rst | 1 + .../datastore_admin_v1/gapic/datastore_admin_client.py | 2 -- packages/google-cloud-datastore/synth.metadata | 10 +++++----- packages/google-cloud-datastore/synth.py | 7 +++++++ 5 files changed, 19 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-datastore/docs/admin_client.rst diff --git a/packages/google-cloud-datastore/docs/admin_client.rst b/packages/google-cloud-datastore/docs/admin_client.rst new file mode 100644 index 000000000000..1c025ca5f132 --- /dev/null +++ b/packages/google-cloud-datastore/docs/admin_client.rst @@ -0,0 +1,6 @@ +Datastore Admin Client +====================== + +.. automodule:: google.cloud.datastore_admin_v1.gapic.datastore_admin_client + :members: + :show-inheritance: diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst index 30d0575a2294..eed055183f79 100644 --- a/packages/google-cloud-datastore/docs/index.rst +++ b/packages/google-cloud-datastore/docs/index.rst @@ -13,6 +13,7 @@ API Reference transactions batches helpers + admin_client Changelog diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py index 62db2b92e60c..d275eca1b65f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py @@ -56,7 +56,6 @@ class DatastoreAdminClient(object): The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- ## Concepts Project, namespace, kind, and entity as defined in the Google Cloud Datastore @@ -68,7 +67,6 @@ class DatastoreAdminClient(object): specified as a combination of kinds and namespaces (either or both of which may be all). 
- ----------------------------------------------------------------------------- ## Services # Export/Import diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index ac48db8b5bb6..15ff18f3d046 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,23 +3,23 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "50ed94503da244434df0be58098a0ccf2da54b16" + "remote": "git@github.com:googleapis/python-datastore", + "sha": "f6290c02c46c83616a26aaa9babc43c3c6bed2f2" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "bbe5f05ccb4ccb4a4dae92c558a25c6d3fe55842", - "internalRef": "321580134" + "sha": "94006b3cb8d2fb44703cf535da15608eed6bf7db", + "internalRef": "325949033" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669" + "sha": "bd0deaa1113b588d70449535ab9cbf0f2bd0e72f" } } ], diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index f0d11183d8a4..8fb7a6a0277b 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -55,6 +55,13 @@ "google-cloud-datstore" ) +# Remove spurious markup +s.replace( + "google/**/datastore_admin_client.py", + "-----------------------------------------------------------------------------", + "" +) + # TODO(busunkim): Remove during the microgenerator transition. # This re-orders the parameters to avoid breaking existing code. num = s.replace( From 62e7abc1982fcd35dea47996e172499911f8d128 Mon Sep 17 00:00:00 2001 From: Chris Rossi Date: Tue, 11 Aug 2020 20:02:37 -0400 Subject: [PATCH 279/611] fix: smooth over system test bumps (#66) Adds back some missing documentation to `CONTRIBUTING.rst` that covers creating indexes and populating test data for system tests. Also increases the number of `timestamp_key` entities created by `populate_datastore.py` to 10000, so that system tests can pass. Co-authored-by: Tres Seaver --- .../google-cloud-datastore/CONTRIBUTING.rst | 25 +++++++++++++ packages/google-cloud-datastore/synth.py | 37 +++++++++++++++++++ .../tests/system/utils/populate_datastore.py | 2 +- 3 files changed, 63 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 653ac4341f8f..6f63d2c5ca49 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -170,6 +170,31 @@ Running System Tests $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" +- You'll need to create composite + `indexes `__ + with the ``gcloud`` command line + `tool `__:: + + # Install the app (App Engine Command Line Interface) component. + $ gcloud components install app-engine-python + + # Authenticate the gcloud tool with your account. + $ GOOGLE_APPLICATION_CREDENTIALS="path/to/app_credentials.json" + $ gcloud auth activate-service-account > --key-file=${GOOGLE_APPLICATION_CREDENTIALS} + + # Create the indexes + $ gcloud datastore indexes create tests/system/index.yaml + +- You'll also need stored data in your dataset. To populate this data, run:: + + $ python tests/system/utils/populate_datastore.py + +- If you make a mistake during development (i.e. 
a failing test that + prevents clean-up) you can clear all system test data from your + datastore instance via:: + + $ python tests/system/utils/clear_datastore.py + ************* Test Coverage diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 8fb7a6a0277b..c29df7a50fdc 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -89,6 +89,7 @@ if num != 1: raise Exception("Required replacement not made.") + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- @@ -98,3 +99,39 @@ s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +# Add documentation about creating indexes and populating data for system +# tests. +num = s.replace( + "CONTRIBUTING.rst", + 'app_credentials.json"', + """app_credentials.json" + +- You'll need to create composite + `indexes `__ + with the ``gcloud`` command line + `tool `__:: + + # Install the app (App Engine Command Line Interface) component. + $ gcloud components install app-engine-python + + # Authenticate the gcloud tool with your account. + $ GOOGLE_APPLICATION_CREDENTIALS="path/to/app_credentials.json" + $ gcloud auth activate-service-account \ + > --key-file=${GOOGLE_APPLICATION_CREDENTIALS} + + # Create the indexes + $ gcloud datastore indexes create tests/system/index.yaml + +- You'll also need stored data in your dataset. To populate this data, run:: + + $ python tests/system/utils/populate_datastore.py + +- If you make a mistake during development (i.e. a failing test that + prevents clean-up) you can clear all system test data from your + datastore instance via:: + + $ python tests/system/utils/clear_datastore.py""") + +if num != 1: + raise Exception("Required replacement not made.") diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index e8e1574aa841..223741e8897c 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -156,7 +156,7 @@ def add_timestamp_keys(client=None): # Get a client that uses the test dataset. 
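The one-line change below is plain arithmetic: at 500 entities per batch, which matches Datastore's 500-mutation limit per commit, 20 batches produce the 10,000 `timestamp_key` entities the system tests expect:

    num_batches = 20
    batch_size = 500  # Datastore's per-commit mutation limit.
    assert num_batches * batch_size == 10000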
client = datastore.Client() - num_batches = 2 + num_batches = 20 batch_size = 500 timestamp_micros = set() From 316d913c1e8cf2dbd11f3e1a7f666de295b58ec7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 12 Aug 2020 15:38:05 -0400 Subject: [PATCH 280/611] feat: add retry and timeout args to API methods (#67) Closes #3 --- .../google/cloud/datastore/batch.py | 27 ++- .../google/cloud/datastore/client.py | 180 ++++++++++++++++-- .../google/cloud/datastore/query.py | 47 ++++- .../google/cloud/datastore/transaction.py | 67 ++++++- .../tests/unit/test_batch.py | 40 ++++ .../tests/unit/test_client.py | 149 +++++++++++---- .../tests/unit/test_query.py | 46 ++++- .../tests/unit/test_transaction.py | 67 +++++-- 8 files changed, 543 insertions(+), 80 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index dc8463f3f973..294c1b45e9a5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -236,7 +236,7 @@ def begin(self): raise ValueError("Batch already started previously.") self._status = self._IN_PROGRESS - def _commit(self): + def _commit(self, retry, timeout): """Commits the batch. This is called by :meth:`commit`. @@ -246,8 +246,16 @@ def _commit(self): else: mode = _datastore_pb2.CommitRequest.TRANSACTIONAL + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + + if timeout is not None: + kwargs["timeout"] = timeout + commit_response_pb = self._client._datastore_api.commit( - self.project, mode, self._mutations, transaction=self._id + self.project, mode, self._mutations, transaction=self._id, **kwargs ) _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that @@ -257,13 +265,24 @@ def _commit(self): new_id = new_key_pb.path[-1].id entity.key = entity.key.completed_key(new_id) - def commit(self): + def commit(self, retry=None, timeout=None): """Commits the batch. This is called automatically upon exiting a with statement, however it can be called explicitly if you don't want to use a context manager. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :raises: :class:`~exceptions.ValueError` if the batch is not in progress. 
""" @@ -271,7 +290,7 @@ def commit(self): raise ValueError("Batch must be in progress to commit()") try: - self._commit() + self._commit(retry=retry, timeout=timeout) finally: self._status = self._FINISHED diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 7dd286d39fb6..0a4466303f0e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -89,6 +89,19 @@ def _determine_default_project(project=None): return project +def _make_retry_timeout_kwargs(retry, timeout): + """Helper: make optional retry / timeout kwargs dict.""" + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + + if timeout is not None: + kwargs["timeout"] = timeout + + return kwargs + + def _extended_lookup( datastore_api, project, @@ -97,6 +110,8 @@ def _extended_lookup( deferred=None, eventual=False, transaction_id=None, + retry=None, + timeout=None, ): """Repeat lookup until all keys found (unless stop requested). @@ -133,6 +148,17 @@ def _extended_lookup( the given transaction. Incompatible with ``eventual==True``. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not null or @@ -144,6 +170,8 @@ def _extended_lookup( if deferred is not None and deferred != []: raise ValueError("deferred must be None or an empty list") + kwargs = _make_retry_timeout_kwargs(retry, timeout) + results = [] loop_num = 0 @@ -151,7 +179,7 @@ def _extended_lookup( while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( - project, key_pbs, read_options=read_options + project, key_pbs, read_options=read_options, **kwargs ) # Accumulate the new results. @@ -338,7 +366,16 @@ def current_transaction(self): if isinstance(transaction, Transaction): return transaction - def get(self, key, missing=None, deferred=None, transaction=None, eventual=False): + def get( + self, + key, + missing=None, + deferred=None, + transaction=None, + eventual=False, + retry=None, + timeout=None, + ): """Retrieve an entity from a single key (if it exists). .. note:: @@ -369,6 +406,17 @@ def get(self, key, missing=None, deferred=None, transaction=None, eventual=False Setting True will use eventual consistency, but cannot be used inside a transaction or will raise ValueError. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType`` :returns: The requested entity if it exists. 
@@ -380,12 +428,21 @@ def get(self, key, missing=None, deferred=None, transaction=None, eventual=False deferred=deferred, transaction=transaction, eventual=eventual, + retry=retry, + timeout=timeout, ) if entities: return entities[0] def get_multi( - self, keys, missing=None, deferred=None, transaction=None, eventual=False + self, + keys, + missing=None, + deferred=None, + transaction=None, + eventual=False, + retry=None, + timeout=None, ): """Retrieve entities, along with their attributes. @@ -412,6 +469,17 @@ def get_multi( Setting True will use eventual consistency, but cannot be used inside a transaction or will raise ValueError. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. :raises: :class:`ValueError` if one or more of ``keys`` has a project @@ -437,6 +505,8 @@ def get_multi( missing=missing, deferred=deferred, transaction_id=transaction and transaction.id, + retry=retry, + timeout=timeout, ) if missing is not None: @@ -451,7 +521,7 @@ def get_multi( return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs] - def put(self, entity): + def put(self, entity, retry=None, timeout=None): """Save an entity in the Cloud Datastore. .. note:: @@ -462,15 +532,41 @@ def put(self, entity): :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity to be saved to the datastore. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + Only meaningful outside of another batch / transaction. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. Only meaningful outside of another + batch / transaction. """ - self.put_multi(entities=[entity]) + self.put_multi(entities=[entity], retry=retry, timeout=timeout) - def put_multi(self, entities): + def put_multi(self, entities, retry=None, timeout=None): """Save entities in the Cloud Datastore. :type entities: list of :class:`google.cloud.datastore.entity.Entity` :param entities: The entities to be saved to the datastore. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + Only meaningful outside of another batch / transaction. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. Only meaningful outside of another + batch / transaction. + :raises: :class:`ValueError` if ``entities`` is a single entity. """ if isinstance(entities, Entity): @@ -490,9 +586,9 @@ def put_multi(self, entities): current.put(entity) if not in_batch: - current.commit() + current.commit(retry=retry, timeout=timeout) - def delete(self, key): + def delete(self, key, retry=None, timeout=None): """Delete the key in the Cloud Datastore. .. 
note:: @@ -503,14 +599,40 @@ def delete(self, key): :type key: :class:`google.cloud.datastore.key.Key` :param key: The key to be deleted from the datastore. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + Only meaningful outside of another batch / transaction. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. Only meaningful outside of another + batch / transaction. """ - self.delete_multi(keys=[key]) + self.delete_multi(keys=[key], retry=retry, timeout=timeout) - def delete_multi(self, keys): + def delete_multi(self, keys, retry=None, timeout=None): """Delete keys from the Cloud Datastore. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be deleted from the Datastore. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + Only meaningful outside of another batch / transaction. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. Only meaningful outside of another + batch / transaction. """ if not keys: return @@ -527,9 +649,9 @@ def delete_multi(self, keys): current.delete(key) if not in_batch: - current.commit() + current.commit(retry=retry, timeout=timeout) - def allocate_ids(self, incomplete_key, num_ids): + def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None): """Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` @@ -538,6 +660,17 @@ def allocate_ids(self, incomplete_key, num_ids): :type num_ids: int :param num_ids: The number of IDs to allocate. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. @@ -550,8 +683,10 @@ def allocate_ids(self, incomplete_key, num_ids): incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids + kwargs = _make_retry_timeout_kwargs(retry, timeout) + response_pb = self._datastore_api.allocate_ids( - incomplete_key.project, incomplete_key_pbs + incomplete_key.project, incomplete_key_pbs, **kwargs ) allocated_ids = [ allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys @@ -666,7 +801,7 @@ def do_something(entity): kwargs["namespace"] = self.namespace return Query(self, **kwargs) - def reserve_ids(self, complete_key, num_ids): + def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None): """Reserve a list of IDs from a complete key. :type complete_key: :class:`google.cloud.datastore.key.Key` @@ -675,6 +810,17 @@ def reserve_ids(self, complete_key, num_ids): :type num_ids: int :param num_ids: The number of IDs to reserve. 
+ :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :rtype: class:`NoneType` :returns: None :raises: :class:`ValueError` if `complete_key`` is not a @@ -686,9 +832,13 @@ def reserve_ids(self, complete_key, num_ids): if not isinstance(num_ids, int): raise ValueError(("num_ids is not a valid integer.", num_ids)) + kwargs = _make_retry_timeout_kwargs(retry, timeout) + complete_key_pb = complete_key.to_protobuf() complete_key_pbs = [complete_key_pb] * num_ids - self._datastore_api.reserve_ids(complete_key.project, complete_key_pbs) + self._datastore_api.reserve_ids( + complete_key.project, complete_key_pbs, **kwargs + ) return None diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 78a153cb3247..7a4bedeb8082 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -344,6 +344,8 @@ def fetch( end_cursor=None, client=None, eventual=False, + retry=None, + timeout=None, ): """Execute the Query; return an iterator for the matching entities. @@ -380,6 +382,17 @@ def fetch( but cannot be used inside a transaction or will raise ValueError. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :rtype: :class:`Iterator` :returns: The iterator for the query. """ @@ -394,6 +407,8 @@ def fetch( start_cursor=start_cursor, end_cursor=end_cursor, eventual=eventual, + retry=retry, + timeout=timeout, ) @@ -427,6 +442,17 @@ class Iterator(page_iterator.Iterator): Setting True will use eventual consistency, but cannot be used inside a transaction or will raise ValueError. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. """ next_page_token = None @@ -440,6 +466,8 @@ def __init__( start_cursor=None, end_cursor=None, eventual=False, + retry=None, + timeout=None, ): super(Iterator, self).__init__( client=client, @@ -451,6 +479,8 @@ def __init__( self._offset = offset self._end_cursor = end_cursor self._eventual = eventual + self._retry = retry + self._timeout = timeout # The attributes below will change over the life of the iterator. 
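On the query side, `fetch` hands the same pair to the iterator, which, as the following hunks show, applies them to every `run_query` request, including the follow-up requests issued while the backend is still skipping offset results. A usage sketch (kind and values illustrative):

    from google.api_core.retry import Retry
    from google.cloud import datastore

    client = datastore.Client()
    query = client.query(kind="Task")

    # The retry/timeout pair applies to each page the iterator fetches.
    tasks = list(query.fetch(limit=100, retry=Retry(deadline=30.0), timeout=5.0))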
self._more_results = True self._skipped_results = 0 @@ -536,8 +566,17 @@ def _next_page(self): partition_id = entity_pb2.PartitionId( project_id=self._query.project, namespace_id=self._query.namespace ) + + kwargs = {} + + if self._retry is not None: + kwargs["retry"] = self._retry + + if self._timeout is not None: + kwargs["timeout"] = self._timeout + response_pb = self.client._datastore_api.run_query( - self._query.project, partition_id, read_options, query=query_pb + self._query.project, partition_id, read_options, query=query_pb, **kwargs ) while ( @@ -551,7 +590,11 @@ def _next_page(self): query_pb.start_cursor = response_pb.batch.skipped_cursor query_pb.offset -= response_pb.batch.skipped_results response_pb = self.client._datastore_api.run_query( - self._query.project, partition_id, read_options, query=query_pb + self._query.project, + partition_id, + read_options, + query=query_pb, + **kwargs ) entity_pbs = self._process_query_results(response_pb) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index ccff5561c644..705cc059895f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -18,6 +18,19 @@ from google.cloud.datastore_v1.types import TransactionOptions +def _make_retry_timeout_kwargs(retry, timeout): + """Helper: make optional retry / timeout kwargs dict.""" + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + + if timeout is not None: + kwargs["timeout"] = timeout + + return kwargs + + class Transaction(Batch): """An abstraction representing datastore Transactions. @@ -193,40 +206,69 @@ def current(self): if isinstance(top, Transaction): return top - def begin(self): + def begin(self, retry=None, timeout=None): """Begins a transaction. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + :raises: :class:`~exceptions.ValueError` if the transaction has already begun. """ super(Transaction, self).begin() + + kwargs = _make_retry_timeout_kwargs(retry, timeout) + try: - response_pb = self._client._datastore_api.begin_transaction(self.project) + response_pb = self._client._datastore_api.begin_transaction( + self.project, **kwargs + ) self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead self._status = self._ABORTED raise - def rollback(self): + def rollback(self, retry=None, timeout=None): """Rolls back the current transaction. This method has necessary side-effects: - Sets the current transaction's ID to None. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. 
""" + kwargs = _make_retry_timeout_kwargs(retry, timeout) + try: # No need to use the response it contains nothing. - self._client._datastore_api.rollback(self.project, self._id) + self._client._datastore_api.rollback(self.project, self._id, **kwargs) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. self._id = None - def commit(self): + def commit(self, retry=None, timeout=None): """Commits the transaction. This is called automatically upon exiting a with statement, @@ -236,9 +278,22 @@ def commit(self): This method has necessary side-effects: - Sets the current transaction's ID to None. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. """ + kwargs = _make_retry_timeout_kwargs(retry, timeout) + try: - super(Transaction, self).commit() + super(Transaction, self).commit(**kwargs) finally: # Clear our own ID in case this gets accidentally reused. self._id = None diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index 8516e78c0c7a..7ad2aeab4d33 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -229,6 +229,46 @@ def test_commit(self): mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL commit_method.assert_called_with(project, mode, [], transaction=None) + def test_commit_w_timeout(self): + from google.cloud.datastore_v1.proto import datastore_pb2 + + project = "PROJECT" + client = _Client(project) + batch = self._make_one(client) + timeout = 100000 + + self.assertEqual(batch._status, batch._INITIAL) + batch.begin() + self.assertEqual(batch._status, batch._IN_PROGRESS) + batch.commit(timeout=timeout) + self.assertEqual(batch._status, batch._FINISHED) + + commit_method = client._datastore_api.commit + mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL + commit_method.assert_called_with( + project, mode, [], transaction=None, timeout=timeout + ) + + def test_commit_w_retry(self): + from google.cloud.datastore_v1.proto import datastore_pb2 + + project = "PROJECT" + client = _Client(project) + batch = self._make_one(client) + retry = mock.Mock() + + self.assertEqual(batch._status, batch._INITIAL) + batch.begin() + self.assertEqual(batch._status, batch._IN_PROGRESS) + batch.commit(retry=retry) + self.assertEqual(batch._status, batch._FINISHED) + + commit_method = client._datastore_api.commit + mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL + commit_method.assert_called_with( + project, mode, [], transaction=None, retry=retry + ) + def test_commit_wrong_status(self): project = "PROJECT" client = _Client(project) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index f172044e6590..ab186a8df628 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -356,25 +356,24 @@ def test__push_batch_and__pop_batch(self): self.assertEqual(list(client._batch_stack), []) def test_get_miss(self): - _called_with = [] - - def _get_multi(*args, **kw): - _called_with.append((args, kw)) - return [] creds = 
_make_credentials() client = self._make_one(credentials=creds) - client.get_multi = _get_multi + get_multi = client.get_multi = mock.Mock(return_value=[]) key = object() self.assertIsNone(client.get(key)) - self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]["keys"], [key]) - self.assertIsNone(_called_with[0][1]["missing"]) - self.assertIsNone(_called_with[0][1]["deferred"]) - self.assertIsNone(_called_with[0][1]["transaction"]) + get_multi.assert_called_once_with( + keys=[key], + missing=None, + deferred=None, + transaction=None, + eventual=False, + retry=None, + timeout=None, + ) def test_get_hit(self): TXN_ID = "123" @@ -554,13 +553,15 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): self.PROJECT, [key1_pb, key2_pb], read_options=read_options ) - def test_get_multi_hit(self): + def test_get_multi_hit_w_retry_w_timeout(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key kind = "Kind" id_ = 1234 path = [{"kind": kind, "id": id_}] + retry = mock.Mock() + timeout = 100000 # Make a found entity pb to be returned from mock backend. entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") @@ -573,7 +574,7 @@ def test_get_multi_hit(self): client._datastore_api_internal = ds_api key = Key(kind, id_, project=self.PROJECT) - (result,) = client.get_multi([key]) + (result,) = client.get_multi([key], retry=retry, timeout=timeout) new_key = result.key # Check the returned value is as expected. @@ -585,7 +586,11 @@ def test_get_multi_hit(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, [key.to_protobuf()], read_options=read_options + self.PROJECT, + [key.to_protobuf()], + read_options=read_options, + retry=retry, + timeout=timeout, ) def test_get_multi_hit_w_transaction(self): @@ -711,20 +716,30 @@ def test_get_multi_max_loops(self): ds_api.lookup.assert_not_called() def test_put(self): - _called_with = [] - - def _put_multi(*args, **kw): - _called_with.append((args, kw)) creds = _make_credentials() client = self._make_one(credentials=creds) - client.put_multi = _put_multi - entity = object() + put_multi = client.put_multi = mock.Mock() + entity = mock.Mock() client.put(entity) - self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]["entities"], [entity]) + put_multi.assert_called_once_with(entities=[entity], retry=None, timeout=None) + + def test_put_w_retry_w_timeout(self): + + creds = _make_credentials() + client = self._make_one(credentials=creds) + put_multi = client.put_multi = mock.Mock() + entity = mock.Mock() + retry = mock.Mock() + timeout = 100000 + + client.put(entity, retry=retry, timeout=timeout) + + put_multi.assert_called_once_with( + entities=[entity], retry=retry, timeout=timeout + ) def test_put_multi_no_entities(self): creds = _make_credentials() @@ -739,13 +754,15 @@ def test_put_multi_w_single_empty_entity(self): client = self._make_one(credentials=creds) self.assertRaises(ValueError, client.put_multi, Entity()) - def test_put_multi_no_batch_w_partial_key(self): + def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self): from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _property_tuples entity = _Entity(foo=u"bar") key = entity.key = _Key(self.PROJECT) key._id = None + retry = mock.Mock() + timeout = 100000 creds = _make_credentials() client = self._make_one(credentials=creds) @@ -753,12 +770,13 @@ def 
test_put_multi_no_batch_w_partial_key(self): ds_api = _make_datastore_api(key_pb) client._datastore_api_internal = ds_api - result = client.put_multi([entity]) + result = client.put_multi([entity], retry=retry, timeout=timeout) self.assertIsNone(result) self.assertEqual(ds_api.commit.call_count, 1) _, positional, keyword = ds_api.commit.mock_calls[0] - self.assertEqual(keyword, {"transaction": None}) + expected_kw = {"transaction": None, "retry": retry, "timeout": timeout} + self.assertEqual(keyword, expected_kw) self.assertEqual(len(positional), 3) self.assertEqual(positional[0], self.PROJECT) @@ -796,20 +814,26 @@ def test_put_multi_existing_batch_w_completed_key(self): self.assertEqual(value_pb.string_value, u"bar") def test_delete(self): - _called_with = [] + creds = _make_credentials() + client = self._make_one(credentials=creds) + delete_multi = client.delete_multi = mock.Mock() + key = mock.Mock() - def _delete_multi(*args, **kw): - _called_with.append((args, kw)) + client.delete(key) + delete_multi.assert_called_once_with(keys=[key], retry=None, timeout=None) + + def test_delete_w_retry_w_timeout(self): creds = _make_credentials() client = self._make_one(credentials=creds) - client.delete_multi = _delete_multi - key = object() + delete_multi = client.delete_multi = mock.Mock() + key = mock.Mock() + retry = mock.Mock() + timeout = 100000 - client.delete(key) + client.delete(key, retry=retry, timeout=timeout) - self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]["keys"], [key]) + delete_multi.assert_called_once_with(keys=[key], retry=retry, timeout=timeout) def test_delete_multi_no_keys(self): creds = _make_credentials() @@ -820,22 +844,25 @@ def test_delete_multi_no_keys(self): self.assertIsNone(result) client._datastore_api_internal.commit.assert_not_called() - def test_delete_multi_no_batch(self): + def test_delete_multi_no_batch_w_retry_w_timeout(self): from google.cloud.datastore_v1.proto import datastore_pb2 key = _Key(self.PROJECT) + retry = mock.Mock() + timeout = 100000 creds = _make_credentials() client = self._make_one(credentials=creds) ds_api = _make_datastore_api() client._datastore_api_internal = ds_api - result = client.delete_multi([key]) + result = client.delete_multi([key], retry=retry, timeout=timeout) self.assertIsNone(result) self.assertEqual(ds_api.commit.call_count, 1) _, positional, keyword = ds_api.commit.mock_calls[0] - self.assertEqual(keyword, {"transaction": None}) + expected_kw = {"transaction": None, "retry": retry, "timeout": timeout} + self.assertEqual(keyword, expected_kw) self.assertEqual(len(positional), 3) self.assertEqual(positional[0], self.PROJECT) @@ -893,6 +920,36 @@ def test_allocate_ids_w_partial_key(self): # Check the IDs returned. 
self.assertEqual([key._id for key in result], list(range(num_ids))) + expected_keys = [incomplete_key.to_protobuf()] * num_ids + alloc_ids.assert_called_once_with(self.PROJECT, expected_keys) + + def test_allocate_ids_w_partial_key_w_retry_w_timeout(self): + num_ids = 2 + + incomplete_key = _Key(self.PROJECT) + incomplete_key._id = None + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) + alloc_ids = mock.Mock(return_value=allocated, spec=[]) + ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) + client._datastore_api_internal = ds_api + + result = client.allocate_ids( + incomplete_key, num_ids, retry=retry, timeout=timeout + ) + + # Check the IDs returned. + self.assertEqual([key._id for key in result], list(range(num_ids))) + + expected_keys = [incomplete_key.to_protobuf()] * num_ids + alloc_ids.assert_called_once_with( + self.PROJECT, expected_keys, retry=retry, timeout=timeout + ) + def test_allocate_ids_w_completed_key(self): creds = _make_credentials() client = self._make_one(credentials=creds) @@ -913,6 +970,26 @@ def test_reserve_ids_w_completed_key(self): expected_keys = [complete_key.to_protobuf()] * num_ids reserve_ids.assert_called_once_with(self.PROJECT, expected_keys) + def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): + num_ids = 2 + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + complete_key = _Key(self.PROJECT) + self.assertTrue(not complete_key.is_partial) + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + + client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) + + expected_keys = [complete_key.to_protobuf()] * num_ids + reserve_ids.assert_called_once_with( + self.PROJECT, expected_keys, retry=retry, timeout=timeout + ) + def test_reserve_ids_w_partial_key(self): num_ids = 2 incomplete_key = _Key(self.PROJECT) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index ed6cbc9d9405..fbaadb2862c7 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -332,6 +332,7 @@ def test_fetch_defaults_w_client_attr(self): client = self._make_client() query = self._make_one(client) + iterator = query.fetch() self.assertIsInstance(iterator, Iterator) @@ -339,19 +340,29 @@ def test_fetch_defaults_w_client_attr(self): self.assertIs(iterator.client, client) self.assertIsNone(iterator.max_results) self.assertEqual(iterator._offset, 0) + self.assertIsNone(iterator._retry) + self.assertIsNone(iterator._timeout) - def test_fetch_w_explicit_client(self): + def test_fetch_w_explicit_client_w_retry_w_timeout(self): from google.cloud.datastore.query import Iterator client = self._make_client() other_client = self._make_client() query = self._make_one(client) - iterator = query.fetch(limit=7, offset=8, client=other_client) + retry = mock.Mock() + timeout = 100000 + + iterator = query.fetch( + limit=7, offset=8, client=other_client, retry=retry, timeout=timeout + ) + self.assertIsInstance(iterator, Iterator) self.assertIs(iterator._query, query) self.assertIs(iterator.client, other_client) self.assertEqual(iterator.max_results, 7) self.assertEqual(iterator._offset, 
8) + self.assertEqual(iterator._retry, retry) + self.assertEqual(iterator._timeout, timeout) class TestIterator(unittest.TestCase): @@ -367,6 +378,7 @@ def _make_one(self, *args, **kw): def test_constructor_defaults(self): query = object() client = object() + iterator = self._make_one(query, client) self.assertFalse(iterator._started) @@ -379,6 +391,8 @@ def test_constructor_defaults(self): self.assertIsNone(iterator._offset) self.assertIsNone(iterator._end_cursor) self.assertTrue(iterator._more_results) + self.assertIsNone(iterator._retry) + self.assertIsNone(iterator._timeout) def test_constructor_explicit(self): query = object() @@ -387,6 +401,9 @@ def test_constructor_explicit(self): offset = 9 start_cursor = b"8290\xff" end_cursor = b"so20rc\ta" + retry = mock.Mock() + timeout = 100000 + iterator = self._make_one( query, client, @@ -394,6 +411,8 @@ def test_constructor_explicit(self): offset=offset, start_cursor=start_cursor, end_cursor=end_cursor, + retry=retry, + timeout=timeout, ) self.assertFalse(iterator._started) @@ -406,6 +425,8 @@ def test_constructor_explicit(self): self.assertEqual(iterator._offset, offset) self.assertEqual(iterator._end_cursor, end_cursor) self.assertTrue(iterator._more_results) + self.assertEqual(iterator._retry, retry) + self.assertEqual(iterator._timeout, timeout) def test__build_protobuf_empty(self): from google.cloud.datastore_v1.proto import query_pb2 @@ -513,7 +534,7 @@ def test__process_query_results_bad_enum(self): with self.assertRaises(ValueError): iterator._process_query_results(response_pb) - def _next_page_helper(self, txn_id=None): + def _next_page_helper(self, txn_id=None, retry=None, timeout=None): from google.api_core import page_iterator from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore_v1.proto import entity_pb2 @@ -531,9 +552,18 @@ def _next_page_helper(self, txn_id=None): client = _Client(project, datastore_api=ds_api, transaction=transaction) query = Query(client) - iterator = self._make_one(query, client) + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + + if timeout is not None: + kwargs["timeout"] = timeout + + iterator = self._make_one(query, client, **kwargs) page = iterator._next_page() + self.assertIsInstance(page, page_iterator.Page) self.assertIs(page._parent, iterator) @@ -544,12 +574,18 @@ def _next_page_helper(self, txn_id=None): read_options = datastore_pb2.ReadOptions(transaction=txn_id) empty_query = query_pb2.Query() ds_api.run_query.assert_called_once_with( - project, partition_id, read_options, query=empty_query + project, partition_id, read_options, query=empty_query, **kwargs ) def test__next_page(self): self._next_page_helper() + def test__next_page_w_retry(self): + self._next_page_helper(retry=mock.Mock()) + + def test__next_page_w_timeout(self): + self._next_page_helper(timeout=100000) + def test__next_page_in_transaction(self): txn_id = b"1xo1md\xe2\x98\x83" self._next_page_helper(txn_id) diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index a1e23610368a..b285db1f5058 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -94,6 +94,23 @@ def test_begin(self): self.assertEqual(xact.id, id_) ds_api.begin_transaction.assert_called_once_with(project) + def test_begin_w_retry_w_timeout(self): + project = "PROJECT" + id_ = 889 + retry = mock.Mock() + timeout = 100000 + + ds_api = 
_make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = self._make_one(client) + + xact.begin(retry=retry, timeout=timeout) + + self.assertEqual(xact.id, id_) + ds_api.begin_transaction.assert_called_once_with( + project, retry=retry, timeout=timeout + ) + def test_begin_tombstoned(self): project = "PROJECT" id_ = 1094 @@ -131,52 +148,77 @@ def test_rollback(self): client = _Client(project, datastore_api=ds_api) xact = self._make_one(client) xact.begin() + xact.rollback() - client._datastore_api.rollback.assert_called_once_with(project, id_) + self.assertIsNone(xact.id) - ds_api.begin_transaction.assert_called_once_with(project) + ds_api.rollback.assert_called_once_with(project, id_) + + def test_rollback_w_retry_w_timeout(self): + project = "PROJECT" + id_ = 239 + retry = mock.Mock() + timeout = 100000 + + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = self._make_one(client) + xact.begin() + + xact.rollback(retry=retry, timeout=timeout) + + self.assertIsNone(xact.id) + ds_api.rollback.assert_called_once_with( + project, id_, retry=retry, timeout=timeout + ) def test_commit_no_partial_keys(self): from google.cloud.datastore_v1.proto import datastore_pb2 project = "PROJECT" id_ = 1002930 + mode = datastore_pb2.CommitRequest.TRANSACTIONAL + ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) xact = self._make_one(client) xact.begin() xact.commit() - mode = datastore_pb2.CommitRequest.TRANSACTIONAL - client._datastore_api.commit.assert_called_once_with( - project, mode, [], transaction=id_ - ) + ds_api.commit.assert_called_once_with(project, mode, [], transaction=id_) self.assertIsNone(xact.id) - ds_api.begin_transaction.assert_called_once_with(project) - def test_commit_w_partial_keys(self): + def test_commit_w_partial_keys_w_retry_w_timeout(self): from google.cloud.datastore_v1.proto import datastore_pb2 project = "PROJECT" kind = "KIND" id1 = 123 + mode = datastore_pb2.CommitRequest.TRANSACTIONAL key = _make_key(kind, id1, project) id2 = 234 + retry = mock.Mock() + timeout = 100000 + ds_api = _make_datastore_api(key, xact_id=id2) client = _Client(project, datastore_api=ds_api) xact = self._make_one(client) xact.begin() entity = _Entity() + xact.put(entity) - xact.commit() + xact.commit(retry=retry, timeout=timeout) - mode = datastore_pb2.CommitRequest.TRANSACTIONAL ds_api.commit.assert_called_once_with( - project, mode, xact.mutations, transaction=id2 + project, + mode, + xact.mutations, + transaction=id2, + retry=retry, + timeout=timeout, ) self.assertIsNone(xact.id) self.assertEqual(entity.key.path, [{"kind": kind, "id": id1}]) - ds_api.begin_transaction.assert_called_once_with(project) def test_context_manager_no_raise(self): from google.cloud.datastore_v1.proto import datastore_pb2 @@ -238,6 +280,7 @@ def test_put_read_only(self): entity = _Entity() xact = self._make_one(client, read_only=True) xact.begin() + with self.assertRaises(RuntimeError): xact.put(entity) From b51c4f9d2abf99a831688b61225788788087e1c9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 14 Aug 2020 13:55:45 -0400 Subject: [PATCH 281/611] feat: supply anonymous credentials under emulator (#71) Closes: #70. 
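The patch ending above threads optional ``retry`` and ``timeout`` arguments through the public client, query, batch, and transaction methods. The sketch below is illustrative only and is not part of any commit in this series: it assumes default credentials and a reachable (or emulated) project, and the kind name, entity values, and numbers are made up.

    # Hedged usage sketch for the retry/timeout options added above.
    from google.api_core.retry import Retry
    from google.cloud import datastore

    client = datastore.Client()

    # ``retry`` governs how failed attempts are repeated; ``deadline`` caps
    # the total time spent retrying, in seconds.
    retry = Retry(deadline=60.0)

    key = client.key("Task", 1234)
    entity = datastore.Entity(key=key)
    entity["done"] = False

    # Per the docstrings above, ``timeout`` bounds each individual attempt.
    client.put(entity, retry=retry, timeout=5.0)
    fetched = client.get(key, retry=retry, timeout=5.0)

Passing neither argument preserves the old behavior: requests are retried using the API's default configuration.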
--- .../google/cloud/datastore/client.py | 31 +++++++++++----- .../tests/system/test_system.py | 36 ++++++++++--------- .../tests/unit/test_client.py | 29 +++++++++++---- 3 files changed, 64 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 0a4466303f0e..67651db34042 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -16,6 +16,7 @@ import os import google.api_core.client_options +from google.auth.credentials import AnonymousCredentials from google.cloud._helpers import _LocalStack from google.cloud._helpers import _determine_default_project as _base_default_project from google.cloud.client import ClientWithProject @@ -27,9 +28,6 @@ from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query from google.cloud.datastore.transaction import Transaction -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.environment_vars import GCD_DATASET -from google.cloud.environment_vars import GCD_HOST try: from google.cloud.datastore._gapic import make_datastore_api @@ -54,13 +52,20 @@ _DATASTORE_BASE_URL = "https://datastore.googleapis.com" """Datastore API request URL base.""" +DATASTORE_EMULATOR_HOST = "DATASTORE_EMULATOR_HOST" +"""Environment variable defining host for datastore emulator server.""" +DATASTORE_DATASET = "DATASTORE_DATASET" +"""Environment variable defining default dataset ID under GCD.""" +DISABLE_GRPC = "GOOGLE_CLOUD_DISABLE_GRPC" +"""Environment variable acting as flag to disable gRPC.""" + _USE_GRPC = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) def _get_gcd_project(): """Gets the GCD application ID if it can be inferred.""" - return os.getenv(GCD_DATASET) + return os.getenv(DATASTORE_DATASET) def _determine_default_project(project=None): @@ -266,6 +271,15 @@ def __init__( _http=None, _use_grpc=None, ): + emulator_host = os.getenv(DATASTORE_EMULATOR_HOST) + + if emulator_host is not None: + if credentials is not None: + raise ValueError( + "Explicit credentials are incompatible with the emulator" + ) + credentials = AnonymousCredentials() + super(Client, self).__init__( project=project, credentials=credentials, @@ -277,14 +291,15 @@ def __init__( self._client_options = client_options self._batch_stack = _LocalStack() self._datastore_api_internal = None + if _use_grpc is None: self._use_grpc = _USE_GRPC else: self._use_grpc = _use_grpc - try: - host = os.environ[GCD_HOST] - self._base_url = "http://" + host - except KeyError: + + if emulator_host is not None: + self._base_url = "http://" + emulator_host + else: api_endpoint = _DATASTORE_BASE_URL if client_options: if type(client_options) == dict: diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 577bd748809d..85995cc93fa2 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -22,10 +22,9 @@ from google.cloud._helpers import UTC from google.cloud import datastore from google.cloud.datastore.helpers import GeoPoint -from google.cloud.environment_vars import GCD_DATASET +from google.cloud.datastore.client import DATASTORE_DATASET from google.cloud.exceptions import Conflict -from test_utils.system import EmulatorCreds from test_utils.system import unique_resource_id from 
tests.system.utils import clear_datastore @@ -44,28 +43,31 @@ class Config(object): def clone_client(client): - return datastore.Client( - project=client.project, - namespace=client.namespace, - credentials=client._credentials, - _http=client._http, - ) + emulator_dataset = os.getenv(DATASTORE_DATASET) + + if emulator_dataset is None: + return datastore.Client( + project=client.project, + namespace=client.namespace, + credentials=client._credentials, + _http=client._http, + ) + else: + return datastore.Client( + project=client.project, namespace=client.namespace, _http=client._http, + ) def setUpModule(): - emulator_dataset = os.getenv(GCD_DATASET) + emulator_dataset = os.getenv(DATASTORE_DATASET) # Isolated namespace so concurrent test runs don't collide. test_namespace = "ns" + unique_resource_id() if emulator_dataset is None: Config.CLIENT = datastore.Client(namespace=test_namespace) else: - credentials = EmulatorCreds() http = requests.Session() # Un-authorized. Config.CLIENT = datastore.Client( - project=emulator_dataset, - namespace=test_namespace, - credentials=credentials, - _http=http, + project=emulator_dataset, namespace=test_namespace, _http=http, ) @@ -240,7 +242,7 @@ def setUpClass(cls): cls.CLIENT.namespace = None # In the emulator, re-populating the datastore is cheap. - if os.getenv(GCD_DATASET) is not None: + if os.getenv(DATASTORE_DATASET) is not None: # Populate the datastore with the cloned client. populate_datastore.add_characters(client=cls.CLIENT) @@ -251,7 +253,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): # In the emulator, destroy the query entities. - if os.getenv(GCD_DATASET) is not None: + if os.getenv(DATASTORE_DATASET) is not None: # Use the client for this test instead of the global. clear_datastore.remove_all_entities(client=cls.CLIENT) @@ -484,7 +486,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): # In the emulator, destroy the query entities. - if os.getenv(GCD_DATASET) is not None: + if os.getenv(DATASTORE_DATASET) is not None: # Use the client for this test instead of the global. 
clear_datastore.remove_all_entities(client=cls.CLIENT) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index ab186a8df628..1bf4c3339114 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -52,10 +52,10 @@ def test_no_value(self): self.assertIsNone(project) def test_value_set(self): - from google.cloud.datastore.client import GCD_DATASET + from google.cloud.datastore.client import DATASTORE_DATASET MOCK_PROJECT = object() - environ = {GCD_DATASET: MOCK_PROJECT} + environ = {DATASTORE_DATASET: MOCK_PROJECT} with mock.patch("os.getenv", new=environ.get): project = self._call_fut() self.assertEqual(project, MOCK_PROJECT) @@ -235,18 +235,33 @@ def test_constructor_use_grpc_default(self): ) self.assertTrue(client4._use_grpc) - def test_constructor_gcd_host(self): - from google.cloud.environment_vars import GCD_HOST + def test_constructor_w_emulator_w_creds(self): + from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST host = "localhost:1234" - fake_environ = {GCD_HOST: host} + fake_environ = {DATASTORE_EMULATOR_HOST: host} project = "PROJECT" creds = _make_credentials() http = object() with mock.patch("os.environ", new=fake_environ): - client = self._make_one(project=project, credentials=creds, _http=http) - self.assertEqual(client.base_url, "http://" + host) + with self.assertRaises(ValueError): + self._make_one(project=project, credentials=creds, _http=http) + + def test_constructor_w_emulator_wo_creds(self): + from google.auth.credentials import AnonymousCredentials + from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST + + host = "localhost:1234" + fake_environ = {DATASTORE_EMULATOR_HOST: host} + project = "PROJECT" + http = object() + + with mock.patch("os.environ", new=fake_environ): + client = self._make_one(project=project, _http=http) + + self.assertEqual(client.base_url, "http://" + host) + self.assertIsInstance(client._credentials, AnonymousCredentials) def test_base_url_property(self): from google.cloud.datastore.client import _DATASTORE_BASE_URL From c3c21df1fa9ba1e5ac6eec5c3f255618189ebe73 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 14 Aug 2020 14:16:07 -0400 Subject: [PATCH 282/611] chore: release 1.15.0 (#69) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 18 ++++++++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 85ac2961a9a6..9bc922bfe0a7 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [1.15.0](https://www.github.com/googleapis/python-datastore/compare/v1.14.0...v1.15.0) (2020-08-14) + + +### Features + +* add retry and timeout args to API methods ([#67](https://www.github.com/googleapis/python-datastore/issues/67)) ([f3283e1](https://www.github.com/googleapis/python-datastore/commit/f3283e14c34c36c8386e4cb6b43c109d469f118c)), closes [#3](https://www.github.com/googleapis/python-datastore/issues/3) +* supply anonymous credentials under emulator 
([#71](https://www.github.com/googleapis/python-datastore/issues/71)) ([4db3c40](https://www.github.com/googleapis/python-datastore/commit/4db3c4048e53c220eee0aea2063c05292bbc5334)), closes [#70](https://www.github.com/googleapis/python-datastore/issues/70) + + +### Bug Fixes + +* smooth over system test bumps ([#66](https://www.github.com/googleapis/python-datastore/issues/66)) ([8bb17ea](https://www.github.com/googleapis/python-datastore/commit/8bb17ea30ed94c0a298a54cc75c031b67d0a576a)) + + +### Documentation + +* add docs for admin client ([#63](https://www.github.com/googleapis/python-datastore/issues/63)) ([43ff64a](https://www.github.com/googleapis/python-datastore/commit/43ff64a5889aeac321fbead967ec527ede414fa2)), closes [#49](https://www.github.com/googleapis/python-datastore/issues/49) + ## [1.14.0](https://www.github.com/googleapis/python-datastore/compare/v1.13.2...v1.14.0) (2020-08-05) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 5396ce7eb0cf..255a8ce4cebb 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.14.0" +version = "1.15.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From f6ff87cdb44c5151d314720faa8234a1d6b13e68 Mon Sep 17 00:00:00 2001 From: Chris Rossi Date: Wed, 2 Sep 2020 15:00:08 -0400 Subject: [PATCH 283/611] fix: repair implementation of `Client.reserve_ids` (#76) `Client.reserve_ids` has been reimplemented in a way that should be a lot more useful, and has been renamed `Client.reserve_ids_sequential`, leaving the old name as a deprecated alias. `Client.reserve_ids_multi` has been added, which takes a sequence of complete keys to reserve. Fixes #37 --- .../google/cloud/datastore/client.py | 81 +++++- .../tests/system/test_system.py | 27 ++ .../tests/unit/test_client.py | 236 +++++++++++++++--- 3 files changed, 303 insertions(+), 41 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 67651db34042..71a0327e251a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -14,6 +14,7 @@ """Convenience wrapper for invoking APIs/factories w/ a project.""" import os +import warnings import google.api_core.client_options from google.auth.credentials import AnonymousCredentials @@ -816,11 +817,18 @@ def do_something(entity): kwargs["namespace"] = self.namespace return Query(self, **kwargs) - def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None): - """Reserve a list of IDs from a complete key. + def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None): + """Reserve a list of IDs sequentially from a complete key. + + This will reserve the key passed as `complete_key` as well as + additional keys derived by incrementing the last ID in the path of + `complete_key` sequentially to obtain the number of keys specified in + `num_ids`. :type complete_key: :class:`google.cloud.datastore.key.Key` - :param complete_key: Complete key to use as base for reserved IDs. + :param complete_key: + Complete key to use as base for reserved IDs. Key must use a + numeric ID and not a string name. :type num_ids: int :param num_ids: The number of IDs to reserve.
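The sequential semantics described in the docstring above reduce to a simple key expansion: the base key itself is reserved, plus keys obtained by incrementing its trailing numeric ID. A brief sketch, illustrative only and not part of the commit (kind names and IDs are made up, and a configured client is assumed):

    from google.cloud import datastore

    client = datastore.Client()
    base_key = client.key("Task", 1000)  # complete key with a numeric ID

    # Reserves Task 1000, Task 1001, and Task 1002 in a single call.
    client.reserve_ids_sequential(base_key, 3)

    # The multi-key variant added later in this patch takes explicit keys:
    client.reserve_ids_multi([client.key("Task", 2000), client.key("Task", 2010)])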
@@ -844,16 +852,75 @@ def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None): if complete_key.is_partial: raise ValueError(("Key is not Complete.", complete_key)) + if complete_key.id is None: + raise ValueError(("Key must use numeric id.", complete_key)) + if not isinstance(num_ids, int): raise ValueError(("num_ids is not a valid integer.", num_ids)) + key_class = type(complete_key) + namespace = complete_key._namespace + project = complete_key._project + flat_path = list(complete_key._flat_path[:-1]) + start_id = complete_key._flat_path[-1] + + key_pbs = [] + for id in range(start_id, start_id + num_ids): + path = flat_path + [id] + key = key_class(*path, project=project, namespace=namespace) + key_pbs.append(key.to_protobuf()) + kwargs = _make_retry_timeout_kwargs(retry, timeout) + self._datastore_api.reserve_ids(complete_key.project, key_pbs, **kwargs) + + return None + + def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None): + """Reserve a list of IDs sequentially from a complete key. - DEPRECATED. Alias for :meth:`reserve_ids_sequential`. - Please use either :meth:`reserve_ids_multi` (recommended) or + :meth:`reserve_ids_sequential`. + """ - complete_key_pb = complete_key.to_protobuf() - complete_key_pbs = [complete_key_pb] * num_ids + message = ( + "Client.reserve_ids is deprecated. Please use " + "Client.reserve_ids_multi or Client.reserve_ids_sequential" + ) + warnings.warn(message, DeprecationWarning) - self._datastore_api.reserve_ids( - complete_key.project, complete_key_pbs, **kwargs + return self.reserve_ids_sequential( + complete_key, num_ids, retry=retry, timeout=timeout ) + + def reserve_ids_multi(self, complete_keys, retry=None, timeout=None): + """Reserve IDs from a list of complete keys. + + :type complete_keys: `list` of :class:`google.cloud.datastore.key.Key` + :param complete_keys: + Complete keys for which to reserve IDs. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: + A retry object used to retry requests. If ``None`` is specified, + requests will be retried using a default configuration. + + :type timeout: float + :param timeout: + Time, in seconds, to wait for the request to complete. + Note that if ``retry`` is specified, the timeout applies + to each individual attempt. + + :rtype: :class:`NoneType` + :returns: None + :raises: :class:`ValueError` if any of ``complete_keys`` is not a + Complete key. + """ + for complete_key in complete_keys: + if complete_key.is_partial: + raise ValueError(("Key is not Complete.", complete_key)) + + kwargs = _make_retry_timeout_kwargs(retry, timeout) + key_pbs = [key.to_protobuf() for key in complete_keys] + self._datastore_api.reserve_ids(complete_keys[0].project, key_pbs, **kwargs) return None diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 85995cc93fa2..c807781bc65c 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -15,6 +15,7 @@ import datetime import os import unittest +import warnings import requests import six @@ -102,6 +103,32 @@ def test_allocate_ids(self): self.assertEqual(len(unique_ids), num_ids) +class TestDatastoreReserveIDs(TestDatastore): + def test_reserve_ids_sequential(self): + # Smoke test to make sure it doesn't blow up. No return value or + # verifiable side effect to verify.
+ num_ids = 10 + Config.CLIENT.reserve_ids_sequential(Config.CLIENT.key("Kind", 1234), num_ids) + + def test_reserve_ids(self): + with warnings.catch_warnings(record=True) as warned: + num_ids = 10 + Config.CLIENT.reserve_ids(Config.CLIENT.key("Kind", 1234), num_ids) + + warned = [ + warning + for warning in warned + if "reserve_ids_sequential" in str(warning.message) + ] + assert len(warned) == 1 + + def test_reserve_ids_multi(self): + # Smoke test to make sure it doesn't blow up. No return value or + # verifiable side effect to verify. + keys = [Config.CLIENT.key("KIND", 1234), Config.CLIENT.key("KIND", 1235)] + Config.CLIENT.reserve_ids_multi(keys) + + class TestDatastoreSave(TestDatastore): @classmethod def setUpClass(cls): diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 1bf4c3339114..61f8af7b7bf9 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -774,8 +774,7 @@ def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self): from google.cloud.datastore.helpers import _property_tuples entity = _Entity(foo=u"bar") - key = entity.key = _Key(self.PROJECT) - key._id = None + key = entity.key = _Key(_Key.kind, None) retry = mock.Mock() timeout = 100000 @@ -813,7 +812,7 @@ def test_put_multi_existing_batch_w_completed_key(self): creds = _make_credentials() client = self._make_one(credentials=creds) entity = _Entity(foo=u"bar") - key = entity.key = _Key(self.PROJECT) + key = entity.key = _Key() with _NoCommitBatch(client) as CURR_BATCH: result = client.put_multi([entity]) @@ -862,7 +861,7 @@ def test_delete_multi_no_keys(self): def test_delete_multi_no_batch_w_retry_w_timeout(self): from google.cloud.datastore_v1.proto import datastore_pb2 - key = _Key(self.PROJECT) + key = _Key() retry = mock.Mock() timeout = 100000 @@ -892,7 +891,7 @@ def test_delete_multi_w_existing_batch(self): client = self._make_one(credentials=creds) client._datastore_api_internal = _make_datastore_api() - key = _Key(self.PROJECT) + key = _Key() with _NoCommitBatch(client) as CURR_BATCH: result = client.delete_multi([key]) @@ -907,7 +906,7 @@ def test_delete_multi_w_existing_transaction(self): client = self._make_one(credentials=creds) client._datastore_api_internal = _make_datastore_api() - key = _Key(self.PROJECT) + key = _Key() with _NoCommitTransaction(client) as CURR_XACT: result = client.delete_multi([key]) @@ -920,8 +919,7 @@ def test_delete_multi_w_existing_transaction(self): def test_allocate_ids_w_partial_key(self): num_ids = 2 - incomplete_key = _Key(self.PROJECT) - incomplete_key._id = None + incomplete_key = _Key(_Key.kind, None) creds = _make_credentials() client = self._make_one(credentials=creds, _use_grpc=False) @@ -933,7 +931,7 @@ def test_allocate_ids_w_partial_key(self): result = client.allocate_ids(incomplete_key, num_ids) # Check the IDs returned. 
- self.assertEqual([key._id for key in result], list(range(num_ids))) + self.assertEqual([key.id for key in result], list(range(num_ids))) expected_keys = [incomplete_key.to_protobuf()] * num_ids alloc_ids.assert_called_once_with(self.PROJECT, expected_keys) @@ -941,8 +939,7 @@ def test_allocate_ids_w_partial_key(self): def test_allocate_ids_w_partial_key_w_retry_w_timeout(self): num_ids = 2 - incomplete_key = _Key(self.PROJECT) - incomplete_key._id = None + incomplete_key = _Key(_Key.kind, None) retry = mock.Mock() timeout = 100000 @@ -958,7 +955,7 @@ def test_allocate_ids_w_partial_key_w_retry_w_timeout(self): ) # Check the IDs returned. - self.assertEqual([key._id for key in result], list(range(num_ids))) + self.assertEqual([key.id for key in result], list(range(num_ids))) expected_keys = [incomplete_key.to_protobuf()] * num_ids alloc_ids.assert_called_once_with( @@ -969,20 +966,114 @@ def test_allocate_ids_w_completed_key(self): creds = _make_credentials() client = self._make_one(credentials=creds) - complete_key = _Key(self.PROJECT) + complete_key = _Key() self.assertRaises(ValueError, client.allocate_ids, complete_key, 2) + def test_reserve_ids_sequential_w_completed_key(self): + num_ids = 2 + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + complete_key = _Key() + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + self.assertTrue(not complete_key.is_partial) + + client.reserve_ids_sequential(complete_key, num_ids) + + reserved_keys = ( + _Key(_Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with(self.PROJECT, expected_keys) + + def test_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(self): + num_ids = 2 + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + complete_key = _Key() + self.assertTrue(not complete_key.is_partial) + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + + client.reserve_ids_sequential( + complete_key, num_ids, retry=retry, timeout=timeout + ) + + reserved_keys = ( + _Key(_Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + self.PROJECT, expected_keys, retry=retry, timeout=timeout + ) + + def test_reserve_ids_sequential_w_completed_key_w_ancestor(self): + num_ids = 2 + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + self.assertTrue(not complete_key.is_partial) + + client.reserve_ids_sequential(complete_key, num_ids) + + reserved_keys = ( + _Key("PARENT", "SINGLETON", _Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with(self.PROJECT, expected_keys) + + def test_reserve_ids_sequential_w_partial_key(self): + num_ids = 2 + incomplete_key = _Key(_Key.kind, None) + creds = _make_credentials() + client = self._make_one(credentials=creds) + 
with self.assertRaises(ValueError): + client.reserve_ids_sequential(incomplete_key, num_ids) + + def test_reserve_ids_sequential_w_wrong_num_ids(self): + num_ids = "2" + complete_key = _Key() + creds = _make_credentials() + client = self._make_one(credentials=creds) + with self.assertRaises(ValueError): + client.reserve_ids_sequential(complete_key, num_ids) + + def test_reserve_ids_sequential_w_non_numeric_key_name(self): + num_ids = 2 + complete_key = _Key(_Key.kind, "batman") + creds = _make_credentials() + client = self._make_one(credentials=creds) + with self.assertRaises(ValueError): + client.reserve_ids_sequential(complete_key, num_ids) + def test_reserve_ids_w_completed_key(self): num_ids = 2 creds = _make_credentials() client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key(self.PROJECT) + complete_key = _Key() reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) client._datastore_api_internal = ds_api self.assertTrue(not complete_key.is_partial) + client.reserve_ids(complete_key, num_ids) - expected_keys = [complete_key.to_protobuf()] * num_ids + + reserved_keys = ( + _Key(_Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] reserve_ids.assert_called_once_with(self.PROJECT, expected_keys) def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): @@ -992,7 +1083,7 @@ def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): creds = _make_credentials() client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key(self.PROJECT) + complete_key = _Key() self.assertTrue(not complete_key.is_partial) reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) @@ -1000,15 +1091,37 @@ def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) - expected_keys = [complete_key.to_protobuf()] * num_ids + reserved_keys = ( + _Key(_Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] reserve_ids.assert_called_once_with( self.PROJECT, expected_keys, retry=retry, timeout=timeout ) + def test_reserve_ids_w_completed_key_w_ancestor(self): + num_ids = 2 + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + self.assertTrue(not complete_key.is_partial) + + client.reserve_ids(complete_key, num_ids) + + reserved_keys = ( + _Key("PARENT", "SINGLETON", _Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with(self.PROJECT, expected_keys) + def test_reserve_ids_w_partial_key(self): num_ids = 2 - incomplete_key = _Key(self.PROJECT) - incomplete_key._id = None + incomplete_key = _Key(_Key.kind, None) creds = _make_credentials() client = self._make_one(credentials=creds) with self.assertRaises(ValueError): @@ -1016,12 +1129,41 @@ def test_reserve_ids_w_partial_key(self): def test_reserve_ids_w_wrong_num_ids(self): num_ids = "2" - complete_key = _Key(self.PROJECT) + complete_key = _Key() creds = _make_credentials() client = self._make_one(credentials=creds) with 
self.assertRaises(ValueError): client.reserve_ids(complete_key, num_ids) + def test_reserve_ids_w_non_numeric_key_name(self): + num_ids = 2 + complete_key = _Key(_Key.kind, "batman") + creds = _make_credentials() + client = self._make_one(credentials=creds) + with self.assertRaises(ValueError): + client.reserve_ids(complete_key, num_ids) + + def test_reserve_ids_multi(self): + creds = _make_credentials() + client = self._make_one(credentials=creds, _use_grpc=False) + key1 = _Key(_Key.kind, "one") + key2 = _Key(_Key.kind, "two") + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + + client.reserve_ids_multi([key1, key2]) + + expected_keys = [key1.to_protobuf(), key2.to_protobuf()] + reserve_ids.assert_called_once_with(self.PROJECT, expected_keys) + + def test_reserve_ids_multi_w_partial_key(self): + incomplete_key = _Key(_Key.kind, None) + creds = _make_credentials() + client = self._make_one(credentials=creds) + with self.assertRaises(ValueError): + client.reserve_ids_multi([incomplete_key]) + def test_key_w_project(self): KIND = "KIND" ID = 1234 @@ -1252,38 +1394,64 @@ class _Entity(dict): class _Key(object): - _MARKER = object() - _kind = "KIND" + kind = "KIND" + id = 1234 + name = None + _project = project = "PROJECT" + _namespace = None + _key = "KEY" _path = None - _id = 1234 _stored = None - def __init__(self, project): - self.project = project + def __init__(self, *flat_path, **kwargs): + if flat_path: + self._flat_path = flat_path + self.kind = flat_path[-2] + id_or_name = flat_path[-1] + if isinstance(id_or_name, int): + self.id = id_or_name + else: + self.id = None + self.name = id_or_name + + else: + self._flat_path = [self.kind, self.id] + + self.__dict__.update(kwargs) + self._kw_args = kwargs @property def is_partial(self): - return self._id is None + return self.id is None and self.name is None def to_protobuf(self): from google.cloud.datastore_v1.proto import entity_pb2 key = self._key = entity_pb2.Key() - # Don't assign it, because it will just get ripped out - # key.partition_id.project_id = self.project - element = key.path.add() - element.kind = self._kind - if self._id is not None: - element.id = self._id + path = self._flat_path + while path: + element = key.path.add() + kind, id_or_name = path[:2] + element.kind = kind + if isinstance(id_or_name, int): + element.id = id_or_name + elif id_or_name is not None: + element.name = id_or_name + + path = path[2:] return key def completed_key(self, new_id): assert self.is_partial - new_key = self.__class__(self.project) - new_key._id = new_id + + path = list(self._flat_path) + path[-1] = new_id + + key_class = type(self) + new_key = key_class(*path, **self._kw_args) return new_key From edab63008a6b6b08692b7b4e2d10130fc6beee28 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Thu, 17 Sep 2020 01:02:22 +0530 Subject: [PATCH 284/611] docs: document thread-safety of client (#75) --- packages/google-cloud-datastore/docs/index.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst index eed055183f79..60357e2dc3fb 100644 --- a/packages/google-cloud-datastore/docs/index.rst +++ b/packages/google-cloud-datastore/docs/index.rst @@ -1,5 +1,13 @@ .. include:: README.rst +.. 
note:: + + Because the Datastore client uses the :mod:`grpcio` library by default + and uses the third-party :mod:`requests` library if gRPC is disabled, + client instances are safe to share across threads. In multiprocessing + scenarios, the best practice is to create client instances *after* + :class:`multiprocessing.Pool` or :class:`multiprocessing.Process` invokes + :func:`os.fork`. API Reference ------------- From b91eee8efbf55461475bb8c12dc7edc1f737234f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 16 Sep 2020 13:14:46 -0700 Subject: [PATCH 285/611] Update protobuf workspace dependency to v3.13.0. (#73) PiperOrigin-RevId: 327026955 Source-Author: Google APIs Source-Date: Mon Aug 17 08:40:22 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 0dc0a6c0f1a9f979bc0690f0caa5fbafa3000c2c Source-Link: https://github.com/googleapis/googleapis/commit/0dc0a6c0f1a9f979bc0690f0caa5fbafa3000c2c Co-authored-by: Tres Seaver --- .../cloud/datastore_admin_v1/proto/datastore_admin_pb2.py | 2 +- .../google/cloud/datastore_admin_v1/proto/index_pb2.py | 2 +- .../google/cloud/datastore_v1/proto/datastore_pb2.py | 2 +- .../google/cloud/datastore_v1/proto/entity_pb2.py | 2 +- .../google/cloud/datastore_v1/proto/query_pb2.py | 2 +- packages/google-cloud-datastore/synth.metadata | 8 ++++---- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py index f7c8d82e40b8..f16463bb0475 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_admin_v1/proto/datastore_admin.proto - +"""Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py index 41f17a3fc811..c1ccb034c511 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_admin_v1/proto/index.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py index 3f5266e2365b..cf7a3cfd1168 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/datastore_v1/proto/datastore.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py index 4df7bbb66148..43321289b17e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/entity.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py index cf915c8445b7..e3bd1141e7f0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/query.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 15ff18f3d046..8eeb1bf5a9ab 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,16 +3,16 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-datastore", - "sha": "f6290c02c46c83616a26aaa9babc43c3c6bed2f2" + "remote": "https://github.com/googleapis/python-datastore.git", + "sha": "c947ad76d6619ae31767a64cb01b66073708d5db" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "94006b3cb8d2fb44703cf535da15608eed6bf7db", - "internalRef": "325949033" + "sha": "0dc0a6c0f1a9f979bc0690f0caa5fbafa3000c2c", + "internalRef": "327026955" } }, { From a9c0e8b593011b0cf4813e4d51944dbab9aea42c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 16 Sep 2020 13:26:01 -0700 Subject: [PATCH 286/611] chore: enable snippet-bot (#81) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/b8e5112c-3774-4b12-81b9-e691b2c52110/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/d91dd8aac77f7a9c5506c238038a26fa4f9e361e --- .../.github/snippet-bot.yml | 0 .../google-cloud-datastore/synth.metadata | 91 ++++++++++++++++++- 2 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-datastore/.github/snippet-bot.yml diff --git a/packages/google-cloud-datastore/.github/snippet-bot.yml b/packages/google-cloud-datastore/.github/snippet-bot.yml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 8eeb1bf5a9ab..038ef3b878c3 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -4,7 +4,8 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "c947ad76d6619ae31767a64cb01b66073708d5db" + "sha": "7df727d00dce7c022f2b6a3c03b31ff7c3836d49" + } }, { @@ -19,7 +20,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "bd0deaa1113b588d70449535ab9cbf0f2bd0e72f" + "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" } } ], @@ -42,5 +43,91 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".coveragerc", + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "google/cloud/datastore_admin_v1/proto/__init__.py", + "google/cloud/datastore_admin_v1/proto/datastore_admin.proto", + "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py", + "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py", + "google/cloud/datastore_admin_v1/proto/index.proto", + "google/cloud/datastore_admin_v1/proto/index_pb2.py", + "google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py", + "google/cloud/datastore_v1/gapic/__init__.py", + "google/cloud/datastore_v1/gapic/datastore_client.py", + 
"google/cloud/datastore_v1/gapic/datastore_client_config.py", + "google/cloud/datastore_v1/gapic/enums.py", + "google/cloud/datastore_v1/gapic/transports/__init__.py", + "google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py", + "google/cloud/datastore_v1/proto/__init__.py", + "google/cloud/datastore_v1/proto/datastore.proto", + "google/cloud/datastore_v1/proto/datastore_pb2.py", + "google/cloud/datastore_v1/proto/datastore_pb2_grpc.py", + "google/cloud/datastore_v1/proto/entity.proto", + "google/cloud/datastore_v1/proto/entity_pb2.py", + "google/cloud/datastore_v1/proto/entity_pb2_grpc.py", + "google/cloud/datastore_v1/proto/query.proto", + "google/cloud/datastore_v1/proto/query_pb2.py", + "google/cloud/datastore_v1/proto/query_pb2_grpc.py", + "noxfile.py", + "renovate.json", + "scripts/decrypt-secrets.sh", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore" ] } \ No newline at end of file From d36f03f6cf6fad796120913979dff5e9d5905a99 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 21 Sep 2020 15:18:20 -0400 Subject: [PATCH 287/611] chore(CI): update kokoro secret manager, release manager, docfx (via synth) (#85) Only add 'doctest' extension if missing: Synthtool template will add it soon: https://github.com/googleapis/synthtool/pull/765 Closes #83 --- .../.kokoro/populate-secrets.sh | 43 ++++++++ .../.kokoro/release/common.cfg | 50 +++------- .../.kokoro/trampoline.sh | 15 ++- packages/google-cloud-datastore/docs/conf.py | 15 ++- packages/google-cloud-datastore/noxfile.py | 4 +- .../scripts/decrypt-secrets.sh | 15 ++- .../google-cloud-datastore/synth.metadata | 97 +------------------ packages/google-cloud-datastore/synth.py | 16 ++- 8 files changed, 113 insertions(+), 142 deletions(-) create mode 100755 packages/google-cloud-datastore/.kokoro/populate-secrets.sh diff --git a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh new file mode 100755 index 000000000000..f52514257ef0 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg index 38ca6b7ef3dd..b4f0c6ad18f6 100644 --- a/packages/google-cloud-datastore/.kokoro/release/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-datastore/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/trampoline.sh b/packages/google-cloud-datastore/.kokoro/trampoline.sh index e8c4251f3ed4..f39236e943a8 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. 
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 2e1f5404adb8..72346ac9fe04 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -29,7 +29,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -37,9 +37,9 @@ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -48,7 +48,7 @@ # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True @@ -95,7 +95,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -342,7 +347,7 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), } diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 811d4ff80c8e..5ee23c5c2b85 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -171,7 +171,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh index ff599eb2af25..21f6d2a26d90 100755 --- a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 038ef3b878c3..d92edc20e375 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,24 +3,23 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "7df727d00dce7c022f2b6a3c03b31ff7c3836d49" - + "remote": "git@github.com:googleapis/python-datastore", + "sha": "5d78275eee690e53cfaff4f43be1c93bd9598c35" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0dc0a6c0f1a9f979bc0690f0caa5fbafa3000c2c", - "internalRef": "327026955" + "sha": "8d73f9486fc193a150f6c907dfb9f49431aff3ff", + "internalRef": "332497859" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" + "sha": "80003a3de2d8a75f5b47cb2e77e018f7f0f776cc" } } ], @@ -43,91 +42,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "google/cloud/datastore_admin_v1/proto/__init__.py", - "google/cloud/datastore_admin_v1/proto/datastore_admin.proto", - "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py", - 
"google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py", - "google/cloud/datastore_admin_v1/proto/index.proto", - "google/cloud/datastore_admin_v1/proto/index_pb2.py", - "google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py", - "google/cloud/datastore_v1/gapic/__init__.py", - "google/cloud/datastore_v1/gapic/datastore_client.py", - "google/cloud/datastore_v1/gapic/datastore_client_config.py", - "google/cloud/datastore_v1/gapic/enums.py", - "google/cloud/datastore_v1/gapic/transports/__init__.py", - "google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py", - "google/cloud/datastore_v1/proto/__init__.py", - "google/cloud/datastore_v1/proto/datastore.proto", - "google/cloud/datastore_v1/proto/datastore_pb2.py", - "google/cloud/datastore_v1/proto/datastore_pb2_grpc.py", - "google/cloud/datastore_v1/proto/entity.proto", - "google/cloud/datastore_v1/proto/entity_pb2.py", - "google/cloud/datastore_v1/proto/entity_pb2_grpc.py", - "google/cloud/datastore_v1/proto/query.proto", - "google/cloud/datastore_v1/proto/query_pb2.py", - "google/cloud/datastore_v1/proto/query_pb2_grpc.py", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore" ] } \ No newline at end of file diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index c29df7a50fdc..af705849487f 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -94,10 +94,24 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=99) -s.move(templated_files, excludes=["docs/conf.py", "docs/multiprocessing.rst"]) +s.move(templated_files, excludes=["docs/multiprocessing.rst"]) s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') +# Add the `sphinx-ext-doctest` extenaion +s.replace( + "docs/conf.py", + """\ + "sphinx.ext.coverage", + "sphinx.ext.napoleon", +""", + """\ + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", +""", +) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) # Add documentation about creating indexes and populating data for system From 2a48e7136bbc3aa7c158e029deb1f9e2749c7d89 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Sep 2020 07:50:31 -0700 Subject: [PATCH 288/611] chore: remove note about editable installs (#88) `pip install -e .` is supported and is how we install the library for tests. 
Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Sep 22 12:06:12 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d Source-Link: https://github.com/googleapis/synthtool/commit/a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d --- .../google-cloud-datastore/CONTRIBUTING.rst | 19 ---- .../google-cloud-datastore/synth.metadata | 94 ++++++++++++++++++- 2 files changed, 91 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 6f63d2c5ca49..2002603b3c7d 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? 
***************************************** diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index d92edc20e375..1f030d2d102d 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,8 +3,8 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-datastore", - "sha": "5d78275eee690e53cfaff4f43be1c93bd9598c35" + "remote": "https://github.com/googleapis/python-datastore.git", + "sha": "6a6592d2da8fdf7655c972af695ac7be9182cea3" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "80003a3de2d8a75f5b47cb2e77e018f7f0f776cc" + "sha": "a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d" } } ], @@ -42,5 +42,93 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".coveragerc", + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "google/cloud/datastore_admin_v1/proto/__init__.py", + "google/cloud/datastore_admin_v1/proto/datastore_admin.proto", + "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py", + "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py", + "google/cloud/datastore_admin_v1/proto/index.proto", + "google/cloud/datastore_admin_v1/proto/index_pb2.py", + "google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py", + "google/cloud/datastore_v1/gapic/__init__.py", + "google/cloud/datastore_v1/gapic/datastore_client.py", + "google/cloud/datastore_v1/gapic/datastore_client_config.py", + "google/cloud/datastore_v1/gapic/enums.py", + "google/cloud/datastore_v1/gapic/transports/__init__.py", + "google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py", + "google/cloud/datastore_v1/proto/__init__.py", + "google/cloud/datastore_v1/proto/datastore.proto", + "google/cloud/datastore_v1/proto/datastore_pb2.py", + 
"google/cloud/datastore_v1/proto/datastore_pb2_grpc.py", + "google/cloud/datastore_v1/proto/entity.proto", + "google/cloud/datastore_v1/proto/entity_pb2.py", + "google/cloud/datastore_v1/proto/entity_pb2_grpc.py", + "google/cloud/datastore_v1/proto/query.proto", + "google/cloud/datastore_v1/proto/query_pb2.py", + "google/cloud/datastore_v1/proto/query_pb2_grpc.py", + "noxfile.py", + "renovate.json", + "scripts/decrypt-secrets.sh", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore" ] } \ No newline at end of file From 32f621dfce06fccec9f01a375cb4468b6b7086cd Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Sep 2020 07:58:05 -0700 Subject: [PATCH 289/611] chore: start tracking obsolete files (#87) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/5da1d2aa-a963-44d1-952a-3ed428de6719/targets - [ ] To automatically regenerate this PR, check this box. --- packages/google-cloud-datastore/synth.metadata | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 1f030d2d102d..b0407f7ef425 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -19,7 +19,8 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d" + "sha": "d3997468c7ea6f96659993820ee2cc24b7ddc98b" + } } ], From 99957d09a5ce49793d90391cfbd3ed8fa2d42602 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 23 Sep 2020 11:27:21 -0400 Subject: [PATCH 290/611] chore: release 1.15.1 (#80) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Tres Seaver --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-datastore/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 9bc922bfe0a7..d3316660641d 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [1.15.1](https://www.github.com/googleapis/python-datastore/compare/v1.15.0...v1.15.1) (2020-09-23) + + +### Bug Fixes + +* repair implementation of `Client.reserve_ids` ([#76](https://www.github.com/googleapis/python-datastore/issues/76)) ([7df727d](https://www.github.com/googleapis/python-datastore/commit/7df727d00dce7c022f2b6a3c03b31ff7c3836d49)), closes [#37](https://www.github.com/googleapis/python-datastore/issues/37) + + +### Documentation + +* document thread-safety of client ([#75](https://www.github.com/googleapis/python-datastore/issues/75)) ([ae0339c](https://www.github.com/googleapis/python-datastore/commit/ae0339ce94aa8557534e3be24890d7f5a69e806b)) + ## [1.15.0](https://www.github.com/googleapis/python-datastore/compare/v1.14.0...v1.15.0) (2020-08-14) diff --git a/packages/google-cloud-datastore/setup.py 
b/packages/google-cloud-datastore/setup.py index 255a8ce4cebb..a0c3fd75b8ea 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.15.0" +version = "1.15.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 429ffa41893fa86c7ae1f673245883a6700d9e56 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 24 Sep 2020 20:00:54 -0700 Subject: [PATCH 291/611] chore: remove note about editable installs (#90) `pip install -e .` is supported and is how we install the library for tests. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Sep 22 12:06:12 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d Source-Link: https://github.com/googleapis/synthtool/commit/a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d --- .../google-cloud-datastore/CONTRIBUTING.rst | 19 +++++++++++++++++++ .../google-cloud-datastore/synth.metadata | 3 +-- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 2002603b3c7d..6f63d2c5ca49 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -80,6 +80,25 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ +Note on Editable Installs / Develop Mode +======================================== + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. + + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + ***************************************** I'm getting weird errors... Can you help? ***************************************** diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index b0407f7ef425..fa5cd2addcf2 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "6a6592d2da8fdf7655c972af695ac7be9182cea3" + "sha": "ddbbb03fc42bf53e698b3869a660a9938b3339e8" } }, { @@ -20,7 +20,6 @@ "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", "sha": "d3997468c7ea6f96659993820ee2cc24b7ddc98b" - } } ], From be9a2ffded00c5f789ee1a0403111f1ce7d56b70 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 24 Sep 2020 20:10:05 -0700 Subject: [PATCH 292/611] chore: exclude namespace package file from coverage (#91) This PR was generated using Autosynth. 
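Context for the exclusion below: `google/cloud/__init__.py` is a namespace-package stub, so there is no logic in it for unit tests to exercise. Across googleapis repositories the stub typically looks like the following (a sketch of the common pattern, not a verbatim copy of this repo's file):

    # google/cloud/__init__.py -- namespace bootstrapping only; nothing here
    # is covered by unit tests, hence the .coveragerc omission.
    try:
        import pkg_resources

        pkg_resources.declare_namespace(__name__)
    except ImportError:
        import pkgutil

        __path__ = pkgutil.extend_path(__path__, __name__)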
:rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/dccf2aa5-417a-444f-b348-db74e12651a1/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/f3c04883d6c43261ff13db1f52d03a283be06871 Source-Link: https://github.com/googleapis/synthtool/commit/a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d --- packages/google-cloud-datastore/.coveragerc | 5 ++++- packages/google-cloud-datastore/synth.metadata | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index dd39c8546c41..0d8e6297dc9c 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -17,6 +17,8 @@ # Generated by synthtool. DO NOT EDIT! [run] branch = True +omit = + google/cloud/__init__.py [report] fail_under = 100 @@ -32,4 +34,5 @@ omit = */gapic/*.py */proto/*.py */core/*.py - */site-packages/*.py \ No newline at end of file + */site-packages/*.py + google/cloud/__init__.py diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index fa5cd2addcf2..b0699b7cfb10 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d3997468c7ea6f96659993820ee2cc24b7ddc98b" + "sha": "f3c04883d6c43261ff13db1f52d03a283be06871" } } ], From ec664bc5cfc6c81684dcd6348ee4cd0e9331f529 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 25 Sep 2020 10:41:57 -0700 Subject: [PATCH 293/611] chore: remove note about editable installs (#92) `pip install -e .` is supported and is how we install the library for tests. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Sep 22 12:06:12 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d Source-Link: https://github.com/googleapis/synthtool/commit/a651c5fb763c69a921aecdd3e1d8dc51dbf20f8d --- .../google-cloud-datastore/CONTRIBUTING.rst | 19 ------------------- .../google-cloud-datastore/synth.metadata | 2 +- 2 files changed, 1 insertion(+), 20 deletions(-) diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 6f63d2c5ca49..2002603b3c7d 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. 
_develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index b0699b7cfb10..4c3400d3107f 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "ddbbb03fc42bf53e698b3869a660a9938b3339e8" + "sha": "8aa3eac28e0e733b61d6ab9e1d233a99467b7081" } }, { From beb2abb9bf87258acf4f544ac9558abc8d87c413 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 6 Oct 2020 11:11:25 -0700 Subject: [PATCH 294/611] fix: use version.py instead of pkg_resources.get_distribution (#94) * fix: use version.py instead of pkg_resources.get_distribution --- .../google/cloud/datastore/__init__.py | 5 +---- .../google/cloud/datastore/client.py | 2 +- .../google/cloud/datastore/version.py | 15 +++++++++++++++ .../gapic/datastore_admin_client.py | 12 ++++++++---- .../cloud/datastore_v1/gapic/datastore_client.py | 5 ++--- packages/google-cloud-datastore/setup.py | 6 +++++- 6 files changed, 32 insertions(+), 13 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/version.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index 078180f57128..c188e1b9f3ca 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -55,10 +55,7 @@ """ -from pkg_resources import get_distribution - -__version__ = get_distribution("google-cloud-datastore").version - +from google.cloud.datastore.version import __version__ from google.cloud.datastore.batch import Batch from google.cloud.datastore.client import Client from google.cloud.datastore.entity import Entity diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 71a0327e251a..86e513a80696 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -21,7 +21,7 @@ from google.cloud._helpers import _LocalStack from google.cloud._helpers import _determine_default_project as _base_default_project from google.cloud.client import ClientWithProject -from google.cloud.datastore import __version__ +from google.cloud.datastore.version import __version__ from google.cloud.datastore import helpers from google.cloud.datastore._http import HTTPDatastoreAPI from google.cloud.datastore.batch import Batch diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py new file mode 100644 index 000000000000..622b910d56a1 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -0,0 +1,15 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "1.15.1" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py index d275eca1b65f..ffb2b030cb1b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py @@ -17,7 +17,7 @@ """Accesses the google.datastore.admin.v1 DatastoreAdmin API.""" import functools -import pkg_resources +import os import warnings from google.oauth2 import service_account @@ -43,10 +43,14 @@ from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 +# To avoid importing datastore into admin (which would result in a +# circular dependency), We exec to get the version via a dict. +dir_path = os.path.abspath(os.path.dirname(__file__)) +version = {} +with open(os.path.join(dir_path, "../../datastore/version.py")) as fp: + exec(fp.read(), version) -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datastore", -).version +_GAPIC_LIBRARY_VERSION = version["__version__"] class DatastoreAdminClient(object): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py index 5f9b530fadb7..ac61c12bfac2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -36,11 +36,10 @@ from google.cloud.datastore_v1.proto import datastore_pb2_grpc from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore_v1.proto import query_pb2 +from google.cloud.datastore.version import __version__ -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datastore", -).version +_GAPIC_LIBRARY_VERSION = __version__ class DatastoreClient(object): diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index a0c3fd75b8ea..65047b15ffc4 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,7 +22,11 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.15.1" +version = {} +with open("google/cloud/datastore/version.py") as fp: + exec(fp.read(), version) +version = version["__version__"] + # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 2c42972d185801b844959019529d8075680207fe Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Oct 2020 11:30:30 -0700 Subject: [PATCH 295/611] chore: release 1.15.2 (#95) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git 
a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index d3316660641d..2eb3c7d014b0 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) + + +### Bug Fixes + +* use version.py instead of pkg_resources.get_distribution ([#94](https://www.github.com/googleapis/python-datastore/issues/94)) ([ea77534](https://www.github.com/googleapis/python-datastore/commit/ea77534bc973e22894357a81420dd17ed8db0027)) + ### [1.15.1](https://www.github.com/googleapis/python-datastore/compare/v1.15.0...v1.15.1) (2020-09-23) From 2e99cedcfba09251ae723cdd1833b7e288fd691d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Oct 2020 12:25:53 -0700 Subject: [PATCH 296/611] chore: release 1.15.2 (#96) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 2eb3c7d014b0..b9bdf727a055 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -7,6 +7,13 @@ ### [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) +### Bug Fixes + +* use version.py instead of pkg_resources.get_distribution ([#94](https://www.github.com/googleapis/python-datastore/issues/94)) ([ea77534](https://www.github.com/googleapis/python-datastore/commit/ea77534bc973e22894357a81420dd17ed8db0027)) + +### [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) + + ### Bug Fixes * use version.py instead of pkg_resources.get_distribution ([#94](https://www.github.com/googleapis/python-datastore/issues/94)) ([ea77534](https://www.github.com/googleapis/python-datastore/commit/ea77534bc973e22894357a81420dd17ed8db0027)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 622b910d56a1..cc6fcf7fde5a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.15.1" +__version__ = "1.15.2" From ef2f4a7aef77ce422a2122a2552cee9bb53662eb Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 6 Oct 2020 13:52:59 -0700 Subject: [PATCH 297/611] fix: use full path and os.path to version.py in setup.py (#97) --- packages/google-cloud-datastore/setup.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 65047b15ffc4..3a8b88af3b5a 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -22,10 +22,6 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = {} -with open("google/cloud/datastore/version.py") as fp: - exec(fp.read(), version) -version = version["__version__"] # Should be one of: # 'Development Status :: 3 - Alpha' @@ -43,6 +39,11 @@ package_root = os.path.abspath(os.path.dirname(__file__)) +version = {} +with open(os.path.join(package_root, "google/cloud/datastore/version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + readme_filename = os.path.join(package_root, "README.rst") with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() From f499cbc0d1a3406a38f41118dfb15290ddcac48e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 7 Oct 2020 17:25:00 -0400 Subject: [PATCH 298/611] chore: release 1.15.3 (#98) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index b9bdf727a055..26fa580d0561 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [1.15.3](https://www.github.com/googleapis/python-datastore/compare/v1.15.2...v1.15.3) (2020-10-06) + + +### Bug Fixes + +* use full path and os.path to version.py in setup.py ([#97](https://www.github.com/googleapis/python-datastore/issues/97)) ([0f5506f](https://www.github.com/googleapis/python-datastore/commit/0f5506fe8bcb899e64cc7c1cf881edc3d3aaead8)) + ### [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index cc6fcf7fde5a..871c958735e7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.15.2" +__version__ = "1.15.3" From 23ff3b1efe7a91819cac6a6395e7ff484d0ee005 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 30 Oct 2020 10:02:21 -0700 Subject: [PATCH 299/611] feat!: Leverage new generator, proto-plus, for google-cloud-datastore (#104) This uses the new microgenerator as the underlying transport for the cloud datastore client files in services/, as well as tests/gapic, are gen'd Major Changes: Discontinues python 2.7 support. 
release-as: 2.0.0-dev1 --- packages/google-cloud-datastore/.coveragerc | 22 +- .../.kokoro/samples/python3.6/common.cfg | 6 + .../.kokoro/samples/python3.7/common.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 6 + packages/google-cloud-datastore/README.rst | 6 +- .../docs/admin_client.rst | 2 +- .../google/cloud/datastore/_gapic.py | 6 +- .../google/cloud/datastore/_http.py | 6 +- .../google/cloud/datastore/batch.py | 24 +- .../google/cloud/datastore/client.py | 21 +- .../google/cloud/datastore/helpers.py | 67 +- .../google/cloud/datastore/key.py | 6 +- .../google/cloud/datastore/query.py | 79 +- .../google/cloud/datastore/transaction.py | 8 +- .../cloud/datastore_admin_v1/__init__.py | 58 +- .../datastore_admin_v1/gapic/__init__.py | 0 .../gapic/datastore_admin_client.py | 667 ----- .../gapic/datastore_admin_client_config.py | 43 - .../cloud/datastore_admin_v1/gapic/enums.py | 130 - .../gapic/transports/__init__.py | 0 .../datastore_admin_grpc_transport.py | 186 -- .../datastore_admin_v1/proto/__init__.py | 0 .../proto/datastore_admin_pb2.py | 1847 -------------- .../proto/datastore_admin_pb2_grpc.py | 414 ---- .../datastore_admin_v1/proto/index_pb2.py | 430 ---- .../proto/index_pb2_grpc.py | 3 - .../google/cloud/datastore_admin_v1/py.typed | 2 + .../datastore_admin_v1/services/__init__.py | 16 + .../services/datastore_admin/__init__.py | 24 + .../services/datastore_admin/async_client.py | 564 +++++ .../services/datastore_admin/client.py | 700 ++++++ .../services/datastore_admin/pagers.py | 149 ++ .../datastore_admin/transports/__init__.py | 36 + .../datastore_admin/transports/base.py | 194 ++ .../datastore_admin/transports/grpc.py | 431 ++++ .../transports/grpc_asyncio.py | 438 ++++ .../google/cloud/datastore_admin_v1/types.py | 56 - .../datastore_admin_v1/types/__init__.py | 49 + .../types/datastore_admin.py | 408 ++++ .../cloud/datastore_admin_v1/types/index.py | 95 + .../google/cloud/datastore_v1/__init__.py | 92 +- .../cloud/datastore_v1/gapic/__init__.py | 0 .../datastore_v1/gapic/datastore_client.py | 805 ------ .../gapic/datastore_client_config.py | 77 - .../google/cloud/datastore_v1/gapic/enums.py | 165 -- .../datastore_v1/gapic/transports/__init__.py | 0 .../transports/datastore_grpc_transport.py | 205 -- .../cloud/datastore_v1/proto/__init__.py | 0 .../datastore_v1/proto/datastore_admin.proto | 329 --- .../cloud/datastore_v1/proto/datastore_pb2.py | 2159 ----------------- .../datastore_v1/proto/datastore_pb2_grpc.py | 368 --- .../cloud/datastore_v1/proto/entity_pb2.py | 986 -------- .../datastore_v1/proto/entity_pb2_grpc.py | 3 - .../cloud/datastore_v1/proto/index.proto | 122 - .../cloud/datastore_v1/proto/query_pb2.py | 1728 ------------- .../datastore_v1/proto/query_pb2_grpc.py | 3 - .../google/cloud/datastore_v1/py.typed | 2 + .../cloud/datastore_v1/services/__init__.py | 16 + .../services/datastore/__init__.py | 24 + .../services/datastore/async_client.py | 671 +++++ .../datastore_v1/services/datastore/client.py | 806 ++++++ .../services/datastore/transports/__init__.py | 36 + .../services/datastore/transports/base.py | 243 ++ .../services/datastore/transports/grpc.py | 422 ++++ .../datastore/transports/grpc_asyncio.py | 431 ++++ .../google/cloud/datastore_v1/types.py | 55 - .../cloud/datastore_v1/types/__init__.py | 97 + .../cloud/datastore_v1/types/datastore.py | 480 ++++ .../google/cloud/datastore_v1/types/entity.py | 260 ++ .../google/cloud/datastore_v1/types/query.py | 397 +++ packages/google-cloud-datastore/noxfile.py | 6 +- 
.../fixup_datastore_admin_v1_keywords.py | 181 ++ .../scripts/fixup_datastore_v1_keywords.py | 184 ++ packages/google-cloud-datastore/setup.py | 20 +- .../google-cloud-datastore/synth.metadata | 102 +- packages/google-cloud-datastore/synth.py | 24 +- .../google-cloud-datastore/tests/doctests.py | 3 - .../unit/gapic/datastore_admin_v1/__init__.py | 1 + .../test_datastore_admin.py | 1425 +++++++++++ .../tests/unit/gapic/datastore_v1/__init__.py | 1 + .../unit/gapic/datastore_v1/test_datastore.py | 1817 ++++++++++++++ .../unit/gapic/v1/test_datastore_client_v1.py | 302 --- .../tests/unit/test__gapic.py | 26 +- .../tests/unit/test__http.py | 244 +- .../tests/unit/test_batch.py | 112 +- .../tests/unit/test_client.py | 183 +- .../tests/unit/test_helpers.py | 166 +- .../tests/unit/test_key.py | 2 +- .../tests/unit/test_query.py | 119 +- .../tests/unit/test_transaction.py | 100 +- 90 files changed, 11473 insertions(+), 11732 deletions(-) delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py 
create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity_pb2_grpc.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/index.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/py.typed create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py create mode 100644 packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py create mode 100644 packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py create mode 100644 packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py create mode 100644 
packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py create mode 100644 packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py create mode 100644 packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py delete mode 100644 packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index 0d8e6297dc9c..ce32b3227567 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -14,25 +14,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Generated by synthtool. DO NOT EDIT! [run] branch = True -omit = - google/cloud/__init__.py - [report] fail_under = 100 show_missing = True +omit = + google/cloud/__init__.py + google/cloud/datastore_v1/__init__.py + google/cloud/datastore_admin_v1/__init__.py + */site-packages/*.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py - google/cloud/__init__.py + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg index 8417400266d0..a65c0f39be86 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-datastore/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg index 2122ef4c5421..18251bfc8de9 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-datastore/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg index c4ca39f0b0b1..77f73452273b 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. 
diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst
index bb685f04f9c9..0b6470e6725e 100644
--- a/packages/google-cloud-datastore/README.rst
+++ b/packages/google-cloud-datastore/README.rst
@@ -53,11 +53,7 @@ dependencies.

 Supported Python Versions
 ^^^^^^^^^^^^^^^^^^^^^^^^^
-Python >= 3.5
-
-Deprecated Python Versions
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+Python >= 3.6

 Mac/Linux
diff --git a/packages/google-cloud-datastore/docs/admin_client.rst b/packages/google-cloud-datastore/docs/admin_client.rst
index 1c025ca5f132..1eed269656f4 100644
--- a/packages/google-cloud-datastore/docs/admin_client.rst
+++ b/packages/google-cloud-datastore/docs/admin_client.rst
@@ -1,6 +1,6 @@
 Datastore Admin Client
 ======================

-.. automodule:: google.cloud.datastore_admin_v1.gapic.datastore_admin_client
+.. automodule:: google.cloud.datastore_admin_v1.services.datastore_admin.client
    :members:
    :show-inheritance:
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py
index 18c2ce917bc2..3200ea3c3ee1 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py
@@ -19,7 +19,8 @@
 from google.cloud._helpers import make_secure_channel
 from google.cloud._http import DEFAULT_USER_AGENT

-from google.cloud.datastore_v1.gapic import datastore_client
+from google.cloud.datastore_v1.services.datastore import client as datastore_client
+from google.cloud.datastore_v1.services.datastore.transports import grpc


 def make_datastore_api(client):
@@ -38,6 +39,7 @@ def make_datastore_api(client):
     else:
         channel = insecure_channel(host)

+    transport = grpc.DatastoreGrpcTransport(channel=channel)
     return datastore_client.DatastoreClient(
-        channel=channel, client_info=client._client_info
+        transport=transport, client_info=client._client_info
     )
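The `_gapic.py` hunk above swaps the old generated client's raw `channel=` constructor argument for an explicit transport object, which is how the microgenerated surface is wired up. A minimal sketch of the new construction pattern; the insecure channel and emulator-style host are illustrative, not part of the patch:

import grpc

from google.cloud.datastore_v1.services.datastore import client as datastore_client
from google.cloud.datastore_v1.services.datastore.transports import grpc as datastore_grpc

# Hypothetical local endpoint; a real deployment would use a secure channel.
channel = grpc.insecure_channel("localhost:8081")

# The channel is wrapped in a transport, and the transport is handed to the client.
transport = datastore_grpc.DatastoreGrpcTransport(channel=channel)
api = datastore_client.DatastoreClient(transport=transport)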
""" - req_data = request_pb.SerializeToString() + req_data = request_pb._pb.SerializeToString() response = _request(http, project, method, req_data, base_url, client_info) - return response_pb_cls.FromString(response) + return response_pb_cls.deserialize(response) def build_api_url(project, method, base_url): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 294c1b45e9a5..7b0b47589576 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -22,7 +22,7 @@ """ from google.cloud.datastore import helpers -from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.types import datastore as _datastore_pb2 class Batch(object): @@ -219,7 +219,7 @@ def delete(self, key): raise ValueError("Key must be from same project as batch") key_pb = key.to_protobuf() - self._add_delete_key_pb().CopyFrom(key_pb) + self._add_delete_key_pb()._pb.CopyFrom(key_pb._pb) def begin(self): """Begins a batch. @@ -242,9 +242,9 @@ def _commit(self, retry, timeout): This is called by :meth:`commit`. """ if self._id is None: - mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL + mode = _datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL else: - mode = _datastore_pb2.CommitRequest.TRANSACTIONAL + mode = _datastore_pb2.CommitRequest.Mode.TRANSACTIONAL kwargs = {} @@ -255,8 +255,15 @@ def _commit(self, retry, timeout): kwargs["timeout"] = timeout commit_response_pb = self._client._datastore_api.commit( - self.project, mode, self._mutations, transaction=self._id, **kwargs + request={ + "project_id": self.project, + "mode": mode, + "transaction": self._id, + "mutations": self._mutations, + }, + **kwargs, ) + _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that # ``commit`` will return keys that match (length and @@ -337,11 +344,11 @@ def _assign_entity_to_pb(entity_pb, entity): :param entity: The entity being updated within the batch / transaction. """ bare_entity_pb = helpers.entity_to_protobuf(entity) - bare_entity_pb.key.CopyFrom(bare_entity_pb.key) - entity_pb.CopyFrom(bare_entity_pb) + bare_entity_pb._pb.key.CopyFrom(bare_entity_pb._pb.key) + entity_pb._pb.CopyFrom(bare_entity_pb._pb) -def _parse_commit_response(commit_response_pb): +def _parse_commit_response(commit_response): """Extract response data from a commit response. :type commit_response_pb: :class:`.datastore_pb2.CommitResponse` @@ -352,6 +359,7 @@ def _parse_commit_response(commit_response_pb): :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. """ + commit_response_pb = commit_response._pb mut_results = commit_response_pb.mutation_results index_updates = commit_response_pb.index_updates completed_keys = [ diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 86e513a80696..e06b8e60e2ce 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -185,7 +185,12 @@ def _extended_lookup( while loop_num < _MAX_LOOPS: # loop against possible deferred. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py
index 294c1b45e9a5..7b0b47589576 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py
@@ -22,7 +22,7 @@
 """

 from google.cloud.datastore import helpers
-from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2
+from google.cloud.datastore_v1.types import datastore as _datastore_pb2


 class Batch(object):
@@ -219,7 +219,7 @@ def delete(self, key):
             raise ValueError("Key must be from same project as batch")

         key_pb = key.to_protobuf()
-        self._add_delete_key_pb().CopyFrom(key_pb)
+        self._add_delete_key_pb()._pb.CopyFrom(key_pb._pb)

     def begin(self):
         """Begins a batch.
@@ -242,9 +242,9 @@ def _commit(self, retry, timeout):
         This is called by :meth:`commit`.
         """
         if self._id is None:
-            mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+            mode = _datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
         else:
-            mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
+            mode = _datastore_pb2.CommitRequest.Mode.TRANSACTIONAL

         kwargs = {}
@@ -255,8 +255,15 @@
         kwargs["timeout"] = timeout

         commit_response_pb = self._client._datastore_api.commit(
-            self.project, mode, self._mutations, transaction=self._id, **kwargs
+            request={
+                "project_id": self.project,
+                "mode": mode,
+                "transaction": self._id,
+                "mutations": self._mutations,
+            },
+            **kwargs,
         )
+
         _, updated_keys = _parse_commit_response(commit_response_pb)

         # If the back-end returns without error, we are guaranteed that
         # ``commit`` will return keys that match (length and
@@ -337,11 +344,11 @@ def _assign_entity_to_pb(entity_pb, entity):
     :param entity: The entity being updated within the batch / transaction.
     """
     bare_entity_pb = helpers.entity_to_protobuf(entity)
-    bare_entity_pb.key.CopyFrom(bare_entity_pb.key)
-    entity_pb.CopyFrom(bare_entity_pb)
+    bare_entity_pb._pb.key.CopyFrom(bare_entity_pb._pb.key)
+    entity_pb._pb.CopyFrom(bare_entity_pb._pb)


-def _parse_commit_response(commit_response_pb):
+def _parse_commit_response(commit_response):
    """Extract response data from a commit response.

    :type commit_response_pb: :class:`.datastore_pb2.CommitResponse`
@@ -352,6 +359,7 @@
    :class:`.entity_pb2.Key` for each incomplete key that was
    completed in the commit.
    """
+    commit_response_pb = commit_response._pb
    mut_results = commit_response_pb.mutation_results
    index_updates = commit_response_pb.index_updates
    completed_keys = [
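The `_commit` hunk above shows the two surface changes that recur throughout this patch: enum values are now scoped under a nested enum type (`CommitRequest.Mode`), and generated methods take a single `request` dict or request object instead of positional arguments. A standalone sketch of the equivalent request object; the project id is hypothetical:

from google.cloud.datastore_v1.types import datastore as datastore_pb2

# Enum values now live under a nested enum type on the request class.
mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL

# Old generated surface:  api.commit(project_id, mode, mutations, transaction=txn)
# New generated surface:  api.commit(request={...}) or an explicit request object:
request = datastore_pb2.CommitRequest(
    project_id="my-project",  # hypothetical project id
    mode=mode,
    mutations=[],
)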
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py
index 86e513a80696..e06b8e60e2ce 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/client.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py
@@ -185,7 +185,12 @@ def _extended_lookup(
     while loop_num < _MAX_LOOPS:  # loop against possible deferred.
         loop_num += 1
         lookup_response = datastore_api.lookup(
-            project, key_pbs, read_options=read_options, **kwargs
+            request={
+                "project_id": project,
+                "keys": key_pbs,
+                "read_options": read_options,
+            },
+            **kwargs,
         )

         # Accumulate the new results.
@@ -535,7 +540,7 @@
            helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred
        ]

-        return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs]
+        return [helpers.entity_from_protobuf(entity_pb._pb) for entity_pb in entity_pbs]

    def put(self, entity, retry=None, timeout=None):
        """Save an entity in the Cloud Datastore.
@@ -702,7 +707,8 @@ def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None):
        kwargs = _make_retry_timeout_kwargs(retry, timeout)

        response_pb = self._datastore_api.allocate_ids(
-            incomplete_key.project, incomplete_key_pbs, **kwargs
+            request={"project_id": incomplete_key.project, "keys": incomplete_key_pbs},
+            **kwargs,
        )
        allocated_ids = [
            allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys
@@ -871,8 +877,9 @@ def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None
            key_pbs.append(key.to_protobuf())

        kwargs = _make_retry_timeout_kwargs(retry, timeout)
-        self._datastore_api.reserve_ids(complete_key.project, key_pbs, **kwargs)
-
+        self._datastore_api.reserve_ids(
+            request={"project_id": complete_key.project, "keys": key_pbs}, **kwargs
+        )
        return None

    def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None):
@@ -921,6 +928,8 @@ def reserve_ids_multi(self, complete_keys, retry=None, timeout=None):
        kwargs = _make_retry_timeout_kwargs(retry, timeout)

        key_pbs = [key.to_protobuf() for key in complete_keys]
-        self._datastore_api.reserve_ids(complete_keys[0].project, key_pbs, **kwargs)
+        self._datastore_api.reserve_ids(
+            request={"project_id": complete_keys[0].project, "keys": key_pbs}, **kwargs
+        )

        return None
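The public `Client` surface is untouched by these hunks; only the internal call shapes change. A hypothetical usage sketch, assuming a project and credentials are available, showing what now flows through `allocate_ids(request={...})` under the hood:

from google.cloud import datastore

# Hypothetical project; requires real or emulator credentials to run.
client = datastore.Client(project="my-project")
incomplete_key = client.key("Task")

# Internally this issues
#     datastore_api.allocate_ids(request={"project_id": ..., "keys": [...]})
# and reads the assigned ids back from response_pb.keys.
keys = client.allocate_ids(incomplete_key, 5)
print([k.id for k in keys])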
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
index db6f150eff8b..f8b32f38e8e6 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
@@ -25,9 +25,8 @@
 import six

 from google.cloud._helpers import _datetime_to_pb_timestamp
-from google.cloud._helpers import _pb_timestamp_to_datetime
-from google.cloud.datastore_v1.proto import datastore_pb2
-from google.cloud.datastore_v1.proto import entity_pb2
+from google.cloud.datastore_v1.types import datastore as datastore_pb2
+from google.cloud.datastore_v1.types import entity as entity_pb2
 from google.cloud.datastore.entity import Entity
 from google.cloud.datastore.key import Key
@@ -86,7 +85,14 @@ def _new_value_pb(entity_pb, name):
     :rtype: :class:`.entity_pb2.Value`
     :returns: The new ``Value`` protobuf that was added to the entity.
     """
-    return entity_pb.properties.get_or_create(name)
+    properties = entity_pb.properties
+    try:
+        properties = properties._pb
+    except AttributeError:
+        # TODO(microgenerator): shouldn't need this. the issue is that
+        # we have wrapped and non-wrapped protos coming here.
+        pass
+    return properties.get_or_create(name)


 def _property_tuples(entity_pb):
@@ -114,15 +120,24 @@ def entity_from_protobuf(pb):
     :rtype: :class:`google.cloud.datastore.entity.Entity`
     :returns: The entity derived from the protobuf.
     """
+
+    if not getattr(pb, "_pb", False):
+        # Coerce raw pb type into proto-plus pythonic type.
+        proto_pb = entity_pb2.Entity(pb)
+        pb = pb
+    else:
+        proto_pb = pb
+        pb = pb._pb
+
     key = None
-    if pb.HasField("key"):  # Message field (Key)
-        key = key_from_protobuf(pb.key)
+    if "key" in proto_pb:  # Message field (Key)
+        key = key_from_protobuf(proto_pb.key)

     entity_props = {}
     entity_meanings = {}
     exclude_from_indexes = []

-    for prop_name, value_pb in _property_tuples(pb):
+    for prop_name, value_pb in _property_tuples(proto_pb):
         value = _get_value_from_value_pb(value_pb)
         entity_props[prop_name] = value
@@ -211,7 +226,7 @@ def entity_to_protobuf(entity):
     entity_pb = entity_pb2.Entity()
     if entity.key is not None:
         key_pb = entity.key.to_protobuf()
-        entity_pb.key.CopyFrom(key_pb)
+        entity_pb._pb.key.CopyFrom(key_pb._pb)

     for name, value in entity.items():
         value_is_list = isinstance(value, list)
@@ -256,7 +271,7 @@ def get_read_options(eventual, transaction_id):
     if transaction_id is None:
         if eventual:
             return datastore_pb2.ReadOptions(
-                read_consistency=datastore_pb2.ReadOptions.EVENTUAL
+                read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
             )
         else:
             return datastore_pb2.ReadOptions()
@@ -368,7 +383,7 @@
     return name + "_value", value


-def _get_value_from_value_pb(value_pb):
+def _get_value_from_value_pb(value):
     """Given a protobuf for a Value, get the correct value.

     The Cloud Datastore Protobuf API returns a Property Protobuf which
@@ -386,40 +401,44 @@
     :raises: :class:`ValueError <exceptions.ValueError>` if no value type
              has been set.
     """
-    value_type = value_pb.WhichOneof("value_type")
+    if not getattr(value, "_pb", False):
+        # Coerce raw pb type into proto-plus pythonic type.
+        value = entity_pb2.Value(value)
+
+    value_type = value._pb.WhichOneof("value_type")

     if value_type == "timestamp_value":
-        result = _pb_timestamp_to_datetime(value_pb.timestamp_value)
+        result = value.timestamp_value

     elif value_type == "key_value":
-        result = key_from_protobuf(value_pb.key_value)
+        result = key_from_protobuf(value.key_value)

     elif value_type == "boolean_value":
-        result = value_pb.boolean_value
+        result = value.boolean_value

     elif value_type == "double_value":
-        result = value_pb.double_value
+        result = value.double_value

     elif value_type == "integer_value":
-        result = value_pb.integer_value
+        result = value.integer_value

     elif value_type == "string_value":
-        result = value_pb.string_value
+        result = value.string_value

     elif value_type == "blob_value":
-        result = value_pb.blob_value
+        result = value.blob_value

     elif value_type == "entity_value":
-        result = entity_from_protobuf(value_pb.entity_value)
+        result = entity_from_protobuf(value.entity_value)

     elif value_type == "array_value":
         result = [
-            _get_value_from_value_pb(value) for value in value_pb.array_value.values
+            _get_value_from_value_pb(value) for value in value._pb.array_value.values
         ]

     elif value_type == "geo_point_value":
         result = GeoPoint(
-            value_pb.geo_point_value.latitude, value_pb.geo_point_value.longitude
+            value.geo_point_value.latitude, value.geo_point_value.longitude,
         )

     elif value_type == "null_value":
@@ -450,15 +469,15 @@
     """
     attr, val = _pb_attr_value(val)
     if attr == "key_value":
-        value_pb.key_value.CopyFrom(val)
+        value_pb.key_value.CopyFrom(val._pb)
     elif attr == "timestamp_value":
         value_pb.timestamp_value.CopyFrom(val)
     elif attr == "entity_value":
         entity_pb = entity_to_protobuf(val)
-        value_pb.entity_value.CopyFrom(entity_pb)
+        value_pb.entity_value.CopyFrom(entity_pb._pb)
     elif attr == "array_value":
         if len(val) == 0:
-            array_value = entity_pb2.ArrayValue(values=[])
+            array_value = entity_pb2.ArrayValue(values=[])._pb
             value_pb.array_value.CopyFrom(array_value)
         else:
             l_pb = value_pb.array_value.values
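The guards added to `helpers.py` above exist because both wrapped (proto-plus) and raw (`*_pb2`) messages reach these functions, as the TODO notes. A short sketch of the coercion and presence-test idioms the hunks rely on:

from google.cloud.datastore_v1.types import entity as entity_pb2

wrapped = entity_pb2.Entity()

# proto-plus wrappers support membership tests in place of HasField():
assert "key" not in wrapped

# The raw protobuf message is still reachable when needed:
raw = wrapped._pb
assert not raw.HasField("key")

# A raw pb2 message can be handed back to the wrapper constructor,
# which is what the entity_from_protobuf() guard above does.
rewrapped = entity_pb2.Entity(raw)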
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py
index c988eebd80af..d03359bcabb7 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/key.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py
@@ -18,7 +18,7 @@
 import copy
 import six

-from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2
+from google.cloud.datastore_v1.types import entity as _entity_pb2

 from google.cloud._helpers import _to_bytes
 from google.cloud.datastore import _app_engine_key_pb2
@@ -289,14 +289,14 @@ def to_protobuf(self):
             key.partition_id.namespace_id = self.namespace

         for item in self.path:
-            element = key.path.add()
+            element = key.PathElement()
             if "kind" in item:
                 element.kind = item["kind"]
             if "id" in item:
                 element.id = item["id"]
             if "name" in item:
                 element.name = item["name"]
-
+            key.path.append(element)
         return key

     def to_legacy_urlsafe(self, location_prefix=None):
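Proto-plus repeated message fields have no `.add()`, so `to_protobuf` now constructs a `PathElement` and appends it. The same pattern sketched standalone; the kind, id, and project are illustrative:

from google.cloud.datastore_v1.types import entity as entity_pb2

key_pb = entity_pb2.Key()
key_pb.partition_id.project_id = "my-project"  # hypothetical project id

# Build the repeated element explicitly, then append it.
element = entity_pb2.Key.PathElement()
element.kind = "Task"
element.id = 1234
key_pb.path.append(element)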
if property_name == "__key__": key_pb = value.to_protobuf() - property_filter.value.key_value.CopyFrom(key_pb) + property_filter.value.key_value.CopyFrom(key_pb._pb) else: helpers._set_protobuf_value(property_filter.value, value) if not composite_filter.filters: - pb.ClearField("filter") + pb._pb.ClearField("filter") for prop in query.order: - property_order = pb.order.add() + property_order = query_pb2.PropertyOrder() if prop.startswith("-"): property_order.property.name = prop[1:] - property_order.direction = property_order.DESCENDING + property_order.direction = property_order.Direction.DESCENDING else: property_order.property.name = prop - property_order.direction = property_order.ASCENDING + property_order.direction = property_order.Direction.ASCENDING + + pb.order.append(property_order) for distinct_on_name in query.distinct_on: - pb.distinct_on.add().name = distinct_on_name + ref = query_pb2.PropertyReference() + ref.name = distinct_on_name + pb.distinct_on.append(ref) return pb diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 705cc059895f..a1eabed53b02 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -233,7 +233,7 @@ def begin(self, retry=None, timeout=None): try: response_pb = self._client._datastore_api.begin_transaction( - self.project, **kwargs + request={"project_id": self.project}, **kwargs ) self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead @@ -262,7 +262,9 @@ def rollback(self, retry=None, timeout=None): try: # No need to use the response it contains nothing. - self._client._datastore_api.rollback(self.project, self._id, **kwargs) + self._client._datastore_api.rollback( + request={"project_id": self.project, "transaction": self._id}, **kwargs + ) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. @@ -311,7 +313,7 @@ def put(self, entity): :raises: :class:`RuntimeError` if the transaction is marked ReadOnly """ - if self._options.HasField("read_only"): + if "read_only" in self._options: raise RuntimeError("Transaction is read only") else: super(Transaction, self).put(entity) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index a588c3e0fc48..89cac8e11028 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -1,45 +1,51 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py
index a588c3e0fc48..89cac8e11028 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py
@@ -1,45 +1,51 @@
 # -*- coding: utf-8 -*-
-#
+
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#

-
-from __future__ import absolute_import
-import sys
-import warnings
-
-from google.cloud.datastore_admin_v1 import types
-from google.cloud.datastore_admin_v1.gapic import datastore_admin_client
-from google.cloud.datastore_admin_v1.gapic import enums
-
-
-if sys.version_info[:2] == (2, 7):
-    message = (
-        "A future version of this library will drop support for Python 2.7. "
-        "More details about Python 2 support for Google Cloud Client Libraries "
-        "can be found at https://cloud.google.com/python/docs/python2-sunset/"
-    )
-    warnings.warn(message, DeprecationWarning)
-
-
-class DatastoreAdminClient(datastore_admin_client.DatastoreAdminClient):
-    __doc__ = datastore_admin_client.DatastoreAdminClient.__doc__
-    enums = enums
+from .services.datastore_admin import DatastoreAdminClient
+from .types.datastore_admin import CommonMetadata
+from .types.datastore_admin import EntityFilter
+from .types.datastore_admin import ExportEntitiesMetadata
+from .types.datastore_admin import ExportEntitiesRequest
+from .types.datastore_admin import ExportEntitiesResponse
+from .types.datastore_admin import GetIndexRequest
+from .types.datastore_admin import ImportEntitiesMetadata
+from .types.datastore_admin import ImportEntitiesRequest
+from .types.datastore_admin import IndexOperationMetadata
+from .types.datastore_admin import ListIndexesRequest
+from .types.datastore_admin import ListIndexesResponse
+from .types.datastore_admin import OperationType
+from .types.datastore_admin import Progress
+from .types.index import Index

 __all__ = (
-    "enums",
-    "types",
+    "CommonMetadata",
+    "EntityFilter",
+    "ExportEntitiesMetadata",
+    "ExportEntitiesRequest",
+    "ExportEntitiesResponse",
+    "GetIndexRequest",
+    "ImportEntitiesMetadata",
+    "ImportEntitiesRequest",
+    "Index",
+    "IndexOperationMetadata",
+    "ListIndexesRequest",
+    "ListIndexesResponse",
+    "OperationType",
+    "Progress",
     "DatastoreAdminClient",
 )
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/__init__.py
deleted file mode 100644
index e69de29bb2d1..000000000000
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py
deleted file mode 100644
index ffb2b030cb1b..000000000000
--- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py
+++ /dev/null
@@ -1,667 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -"""Accesses the google.datastore.admin.v1 DatastoreAdmin API.""" - -import functools -import os -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import grpc - -from google.cloud.datastore_admin_v1.gapic import datastore_admin_client_config -from google.cloud.datastore_admin_v1.gapic import enums -from google.cloud.datastore_admin_v1.gapic.transports import ( - datastore_admin_grpc_transport, -) -from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2 -from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2_grpc -from google.cloud.datastore_admin_v1.proto import index_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 - -# To avoid importing datastore into admin (which would result in a -# circular dependency), We exec to get the version via a dict. -dir_path = os.path.abspath(os.path.dirname(__file__)) -version = {} -with open(os.path.join(dir_path, "../../datastore/version.py")) as fp: - exec(fp.read(), version) - -_GAPIC_LIBRARY_VERSION = version["__version__"] - - -class DatastoreAdminClient(object): - """ - Google Cloud Datastore Admin API - - - The Datastore Admin API provides several admin services for Cloud Datastore. - - ## Concepts - - Project, namespace, kind, and entity as defined in the Google Cloud Datastore - API. - - Operation: An Operation represents work being performed in the background. - - EntityFilter: Allows specifying a subset of entities in a project. This is - specified as a combination of kinds and namespaces (either or both of which - may be all). - - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a subset of - entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any Google Cloud - Platform project. It is not restricted to the export source project. It is - possible to export from one project and then import into another. - - Exported data can also be loaded into Google BigQuery for analysis. - - Exports and imports are performed asynchronously. An Operation resource is - created for each export/import. The state (including any errors encountered) - of the export/import may be queried via the Operation resource. - - # Index - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. - An Operation resource is created for each such asynchronous operation. - The state of the operation (including any errors encountered) - may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed for the - specified project (including any operations in progress). Operations are not - created directly but through calls on other collections or resources. - - An operation that is not yet done may be cancelled. The request to cancel is - asynchronous and the operation may continue to run for some time after the - request to cancel is made. - - An operation that is done may be deleted so that it is no longer listed as - part of the Operation collection. 
- - ListOperations returns all pending operations, but not completed operations. - - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. - """ - - SERVICE_ADDRESS = "datastore.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.datastore.admin.v1.DatastoreAdmin" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DatastoreAdminGrpcTransport, - Callable[[~.Credentials, type], ~.DatastoreAdminGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = datastore_admin_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=datastore_admin_grpc_transport.DatastoreAdminGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = datastore_admin_grpc_transport.DatastoreAdminGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def export_entities( - self, - project_id, - output_url_prefix, - labels=None, - entity_filter=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Exports a copy of all or a subset of entities from Google Cloud Datastore - to another storage system, such as Google Cloud Storage. Recent updates to - entities may not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed via the - Operation resource that is created. The output of an export may only be - used once the associated operation is done. If an export operation is - cancelled before completion it may leave partial data behind in Google - Cloud Storage. - - Example: - >>> from google.cloud import datastore_admin_v1 - >>> - >>> client = datastore_admin_v1.DatastoreAdminClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `output_url_prefix`: - >>> output_url_prefix = '' - >>> - >>> response = client.export_entities(project_id, output_url_prefix) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. 
Project ID against which to make the request. - output_url_prefix (str): Required. Location for the export metadata and data files. - - The full resource URL of the external storage location. Currently, only - Google Cloud Storage is supported. So output_url_prefix should be of the - form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` is - the name of the Cloud Storage bucket and ``NAMESPACE_PATH`` is an - optional Cloud Storage namespace path (this is not a Cloud Datastore - namespace). For more information about Cloud Storage namespace paths, - see `Object name - considerations `__. - - The resulting files will be nested deeper than the specified URL prefix. - The final output URL will be provided in the - ``google.datastore.admin.v1.ExportEntitiesResponse.output_url`` field. - That value should be used for subsequent ImportEntities operations. - - By nesting the data files deeper, the same Cloud Storage bucket can be - used in multiple ExportEntities operations without conflict. - labels (dict[str -> str]): Client-assigned labels. - entity_filter (Union[dict, ~google.cloud.datastore_admin_v1.types.EntityFilter]): Description of what data from the project is included in the export. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_admin_v1.types.EntityFilter` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_admin_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "export_entities" not in self._inner_api_calls: - self._inner_api_calls[ - "export_entities" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.export_entities, - default_retry=self._method_configs["ExportEntities"].retry, - default_timeout=self._method_configs["ExportEntities"].timeout, - client_info=self._client_info, - ) - - request = datastore_admin_pb2.ExportEntitiesRequest( - project_id=project_id, - output_url_prefix=output_url_prefix, - labels=labels, - entity_filter=entity_filter, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["export_entities"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - datastore_admin_pb2.ExportEntitiesResponse, - metadata_type=datastore_admin_pb2.ExportEntitiesMetadata, - ) - - def import_entities( - self, - project_id, - input_url, - labels=None, - entity_filter=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Imports entities into Google Cloud Datastore. Existing entities with the - same key are overwritten. The import occurs in the background and its - progress can be monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it is possible - that a subset of the data has already been imported to Cloud Datastore. - - Example: - >>> from google.cloud import datastore_admin_v1 - >>> - >>> client = datastore_admin_v1.DatastoreAdminClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `input_url`: - >>> input_url = '' - >>> - >>> response = client.import_entities(project_id, input_url) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - project_id (str): Required. Project ID against which to make the request. - input_url (str): Required. The full resource URL of the external storage location. - Currently, only Google Cloud Storage is supported. So input_url should - be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, - where ``BUCKET_NAME`` is the name of the Cloud Storage bucket, - ``NAMESPACE_PATH`` is an optional Cloud Storage namespace path (this is - not a Cloud Datastore namespace), and ``OVERALL_EXPORT_METADATA_FILE`` - is the metadata file written by the ExportEntities operation. For more - information about Cloud Storage namespace paths, see `Object name - considerations `__. - - For more information, see - ``google.datastore.admin.v1.ExportEntitiesResponse.output_url``. - labels (dict[str -> str]): Client-assigned labels. - entity_filter (Union[dict, ~google.cloud.datastore_admin_v1.types.EntityFilter]): Optionally specify which kinds/namespaces are to be imported. If - provided, the list must be a subset of the EntityFilter used in creating - the export, otherwise a FAILED_PRECONDITION error will be returned. If - no filter is specified then all entities from the export are imported. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_admin_v1.types.EntityFilter` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_admin_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "import_entities" not in self._inner_api_calls: - self._inner_api_calls[ - "import_entities" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.import_entities, - default_retry=self._method_configs["ImportEntities"].retry, - default_timeout=self._method_configs["ImportEntities"].timeout, - client_info=self._client_info, - ) - - request = datastore_admin_pb2.ImportEntitiesRequest( - project_id=project_id, - input_url=input_url, - labels=labels, - entity_filter=entity_filter, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["import_entities"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=datastore_admin_pb2.ImportEntitiesMetadata, - ) - - def get_index( - self, - project_id=None, - index_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an index. - - Example: - >>> from google.cloud import datastore_admin_v1 - >>> - >>> client = datastore_admin_v1.DatastoreAdminClient() - >>> - >>> response = client.get_index() - - Args: - project_id (str): Project ID against which to make the request. - index_id (str): The resource ID of the index to get. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_admin_v1.types.Index` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_index" not in self._inner_api_calls: - self._inner_api_calls[ - "get_index" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_index, - default_retry=self._method_configs["GetIndex"].retry, - default_timeout=self._method_configs["GetIndex"].timeout, - client_info=self._client_info, - ) - - request = datastore_admin_pb2.GetIndexRequest( - project_id=project_id, index_id=index_id, - ) - return self._inner_api_calls["get_index"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_indexes( - self, - project_id=None, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the indexes that match the specified filters. Datastore uses an - eventually consistent query to fetch the list of indexes and may - occasionally return stale results. - - Example: - >>> from google.cloud import datastore_admin_v1 - >>> - >>> client = datastore_admin_v1.DatastoreAdminClient() - >>> - >>> # Iterate over all results - >>> for element in client.list_indexes(): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_indexes().pages: - ... for element in page: - ... # process element - ... pass - - Args: - project_id (str): Project ID against which to make the request. - filter_ (str) - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datastore_admin_v1.types.Index` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_indexes" not in self._inner_api_calls: - self._inner_api_calls[ - "list_indexes" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_indexes, - default_retry=self._method_configs["ListIndexes"].retry, - default_timeout=self._method_configs["ListIndexes"].timeout, - client_info=self._client_info, - ) - - request = datastore_admin_pb2.ListIndexesRequest( - project_id=project_id, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_indexes"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="indexes", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py deleted file mode 100644 index dbbe2b85cc98..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py +++ /dev/null @@ -1,43 +0,0 @@ -config = { - "interfaces": { - "google.datastore.admin.v1.DatastoreAdmin": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ExportEntities": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ImportEntities": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetIndex": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListIndexes": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py deleted file mode 100644 index 77c303fc11b5..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/enums.py +++ /dev/null @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class OperationType(enum.IntEnum): - """ - Operation types. - - Attributes: - OPERATION_TYPE_UNSPECIFIED (int): Unspecified. - EXPORT_ENTITIES (int): ExportEntities. - IMPORT_ENTITIES (int): ImportEntities. - CREATE_INDEX (int): CreateIndex. - DELETE_INDEX (int): DeleteIndex. - """ - - OPERATION_TYPE_UNSPECIFIED = 0 - EXPORT_ENTITIES = 1 - IMPORT_ENTITIES = 2 - CREATE_INDEX = 3 - DELETE_INDEX = 4 - - -class CommonMetadata(object): - class State(enum.IntEnum): - """ - The various possible states for an ongoing Operation. - - Attributes: - STATE_UNSPECIFIED (int): Unspecified. - INITIALIZING (int): Request is being prepared for processing. - PROCESSING (int): Request is actively being processed. - CANCELLING (int): Request is in the process of being cancelled after user called - google.longrunning.Operations.CancelOperation on the operation. - FINALIZING (int): Request has been processed and is in its finalization stage. - SUCCESSFUL (int): Request has completed successfully. - FAILED (int): Request has finished being processed, but encountered an error. - CANCELLED (int): Request has finished being cancelled after user called - google.longrunning.Operations.CancelOperation. - """ - - STATE_UNSPECIFIED = 0 - INITIALIZING = 1 - PROCESSING = 2 - CANCELLING = 3 - FINALIZING = 4 - SUCCESSFUL = 5 - FAILED = 6 - CANCELLED = 7 - - -class Index(object): - class AncestorMode(enum.IntEnum): - """ - For an ordered index, specifies whether each of the entity's ancestors - will be included. - - Attributes: - ANCESTOR_MODE_UNSPECIFIED (int): The ancestor mode is unspecified. - NONE (int): Do not include the entity's ancestors in the index. - ALL_ANCESTORS (int): Include all the entity's ancestors in the index. - """ - - ANCESTOR_MODE_UNSPECIFIED = 0 - NONE = 1 - ALL_ANCESTORS = 2 - - class Direction(enum.IntEnum): - """ - The direction determines how a property is indexed. - - Attributes: - DIRECTION_UNSPECIFIED (int): The direction is unspecified. - ASCENDING (int): The property's values are indexed so as to support sequencing in - ascending order and also query by <, >, <=, >=, and =. - DESCENDING (int): The property's values are indexed so as to support sequencing in - descending order and also query by <, >, <=, >=, and =. - """ - - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class State(enum.IntEnum): - """ - The possible set of states of an index. - - Attributes: - STATE_UNSPECIFIED (int): The state is unspecified. - CREATING (int): The index is being created, and cannot be used by queries. - There is an active long-running operation for the index. - The index is updated when writing an entity. - Some index data may exist. - READY (int): The index is ready to be used. - The index is updated when writing an entity. - The index is fully populated from all stored entities it applies to. - DELETING (int): The index is being deleted, and cannot be used by queries. - There is an active long-running operation for the index. - The index is not updated when writing an entity. - Some index data may exist. - ERROR (int): The index was being created or deleted, but something went wrong. - The index cannot by used by queries. - There is no active long-running operation for the index, - and the most recently finished long-running operation failed. - The index is not updated when writing an entity. - Some index data may exist. 
- """ - - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - DELETING = 3 - ERROR = 4 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py deleted file mode 100644 index 11fd92af67d4..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2_grpc - - -class DatastoreAdminGrpcTransport(object): - """gRPC transport class providing stubs for - google.datastore.admin.v1 DatastoreAdmin API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - - def __init__( - self, channel=None, credentials=None, address="datastore.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "datastore_admin_stub": datastore_admin_pb2_grpc.DatastoreAdminStub( - channel - ), - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="datastore.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def export_entities(self): - """Return the gRPC stub for :meth:`DatastoreAdminClient.export_entities`. - - Exports a copy of all or a subset of entities from Google Cloud Datastore - to another storage system, such as Google Cloud Storage. Recent updates to - entities may not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed via the - Operation resource that is created. The output of an export may only be - used once the associated operation is done. If an export operation is - cancelled before completion it may leave partial data behind in Google - Cloud Storage. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_admin_stub"].ExportEntities - - @property - def import_entities(self): - """Return the gRPC stub for :meth:`DatastoreAdminClient.import_entities`. - - Imports entities into Google Cloud Datastore. Existing entities with the - same key are overwritten. The import occurs in the background and its - progress can be monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it is possible - that a subset of the data has already been imported to Cloud Datastore. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_admin_stub"].ImportEntities - - @property - def get_index(self): - """Return the gRPC stub for :meth:`DatastoreAdminClient.get_index`. - - Gets an index. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_admin_stub"].GetIndex - - @property - def list_indexes(self): - """Return the gRPC stub for :meth:`DatastoreAdminClient.list_indexes`. - - Lists the indexes that match the specified filters. Datastore uses an - eventually consistent query to fetch the list of indexes and may - occasionally return stale results. 
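For reference, the transport being deleted here was constructed either from ambient credentials or from a caller-supplied channel; the two arguments are mutually exclusive, as the constructor above enforces. A minimal usage sketch, assuming the old gapic layout that this patch removes (the address shown is just the constructor's default):

    from google.cloud.datastore_admin_v1.gapic.transports import (
        datastore_admin_grpc_transport as transports,
    )

    # Simplest form: the transport ascertains credentials from the
    # environment and creates its own channel to the default endpoint.
    transport = transports.DatastoreAdminGrpcTransport()

    # Alternatively, build the channel explicitly and hand it in.
    # Passing both `channel` and `credentials` raises ValueError.
    channel = transports.DatastoreAdminGrpcTransport.create_channel(
        address="datastore.googleapis.com:443",
    )
    transport = transports.DatastoreAdminGrpcTransport(channel=channel)

    # Each RPC surfaces as a property returning the raw stub callable,
    # which accepts a deserialized request message.
    list_indexes_rpc = transport.list_indexes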
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_admin_stub"].ListIndexes diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py deleted file mode 100644 index f16463bb0475..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py +++ /dev/null @@ -1,1847 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datastore_admin_v1/proto/datastore_admin.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.datastore_admin_v1.proto import ( - index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datastore_admin_v1/proto/datastore_admin.proto", - package="google.datastore.admin.v1", - syntax="proto3", - serialized_options=b"\n\035com.google.datastore.admin.v1B\023DatastoreAdminProtoP\001Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\252\002\037Google.Cloud.Datastore.Admin.V1\352\002#Google::Cloud::Datastore::Admin::V1", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n;google/cloud/datastore_admin_v1/proto/datastore_admin.proto\x12\x19google.datastore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x31google/cloud/datastore_admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf4\x03\n\x0e\x43ommonMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x0eoperation_type\x18\x03 \x01(\x0e\x32(.google.datastore.admin.v1.OperationType\x12\x45\n\x06labels\x18\x04 \x03(\x0b\x32\x35.google.datastore.admin.v1.CommonMetadata.LabelsEntry\x12>\n\x05state\x18\x05 \x01(\x0e\x32/.google.datastore.admin.v1.CommonMetadata.State\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x8b\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\x0e\n\nCANCELLING\x10\x03\x12\x0e\n\nFINALIZING\x10\x04\x12\x0e\n\nSUCCESSFUL\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x12\r\n\tCANCELLED\x10\x07":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03"\x8d\x02\n\x15\x45xportEntitiesRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x06labels\x18\x02 \x03(\x0b\x32<.google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry\x12>\n\rentity_filter\x18\x03 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x1e\n\x11output_url_prefix\x18\x04 \x01(\tB\x03\xe0\x41\x02\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x85\x02\n\x15ImportEntitiesRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x06labels\x18\x02 \x03(\x0b\x32<.google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry\x12\x16\n\tinput_url\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x16\x45xportEntitiesResponse\x12\x12\n\noutput_url\x18\x01 \x01(\t"\xab\x02\n\x16\x45xportEntitiesMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x03 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x19\n\x11output_url_prefix\x18\x05 \x01(\t"\xa3\x02\n\x16ImportEntitiesMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x03 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x11\n\tinput_url\x18\x05 \x01(\t"4\n\x0c\x45ntityFilter\x12\r\n\x05kinds\x18\x01 \x03(\t\x12\x15\n\rnamespace_ids\x18\x02 \x03(\t"7\n\x0fGetIndexRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x10\n\x08index_id\x18\x03 \x01(\t"_\n\x12ListIndexesRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.datastore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xa5\x01\n\x16IndexOperationMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12\x10\n\x08index_id\x18\x03 \x01(\t*}\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x13\n\x0f\x45XPORT_ENTITIES\x10\x01\x12\x13\n\x0fIMPORT_ENTITIES\x10\x02\x12\x10\n\x0c\x43REATE_INDEX\x10\x03\x12\x10\n\x0c\x44\x45LETE_INDEX\x10\x04\x32\x9c\x07\n\x0e\x44\x61tastoreAdmin\x12\xf6\x01\n\x0e\x45xportEntities\x12\x30.google.datastore.admin.v1.ExportEntitiesRequest\x1a\x1d.google.longrunning.Operation"\x92\x01\x82\xd3\xe4\x93\x02%" 
/v1/projects/{project_id}:export:\x01*\xda\x41\x31project_id,labels,entity_filter,output_url_prefix\xca\x41\x30\n\x16\x45xportEntitiesResponse\x12\x16\x45xportEntitiesMetadata\x12\xed\x01\n\x0eImportEntities\x12\x30.google.datastore.admin.v1.ImportEntitiesRequest\x1a\x1d.google.longrunning.Operation"\x89\x01\x82\xd3\xe4\x93\x02%" /v1/projects/{project_id}:import:\x01*\xda\x41)project_id,labels,input_url,entity_filter\xca\x41/\n\x15google.protobuf.Empty\x12\x16ImportEntitiesMetadata\x12\x8e\x01\n\x08GetIndex\x12*.google.datastore.admin.v1.GetIndexRequest\x1a .google.datastore.admin.v1.Index"4\x82\xd3\xe4\x93\x02.\x12,/v1/projects/{project_id}/indexes/{index_id}\x12\x97\x01\n\x0bListIndexes\x12-.google.datastore.admin.v1.ListIndexesRequest\x1a..google.datastore.admin.v1.ListIndexesResponse")\x82\xd3\xe4\x93\x02#\x12!/v1/projects/{project_id}/indexes\x1av\xca\x41\x18\x64\x61tastore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xbe\x01\n\x1d\x63om.google.datastore.admin.v1B\x13\x44\x61tastoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\xaa\x02\x1fGoogle.Cloud.Datastore.Admin.V1\xea\x02#Google::Cloud::Datastore::Admin::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_OPERATIONTYPE = _descriptor.EnumDescriptor( - name="OperationType", - full_name="google.datastore.admin.v1.OperationType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="OPERATION_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="EXPORT_ENTITIES", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPORT_ENTITIES", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CREATE_INDEX", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DELETE_INDEX", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2515, - serialized_end=2640, -) -_sym_db.RegisterEnumDescriptor(_OPERATIONTYPE) - -OperationType = enum_type_wrapper.EnumTypeWrapper(_OPERATIONTYPE) -OPERATION_TYPE_UNSPECIFIED = 0 -EXPORT_ENTITIES = 1 -IMPORT_ENTITIES = 2 -CREATE_INDEX = 3 -DELETE_INDEX = 4 - - -_COMMONMETADATA_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.datastore.admin.v1.CommonMetadata.State", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INITIALIZING", - index=1, - number=1, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROCESSING", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CANCELLING", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FINALIZING", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUCCESSFUL", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FAILED", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CANCELLED", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=661, - serialized_end=800, -) -_sym_db.RegisterEnumDescriptor(_COMMONMETADATA_STATE) - - -_COMMONMETADATA_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=613, - serialized_end=658, -) - -_COMMONMETADATA = _descriptor.Descriptor( - name="CommonMetadata", - full_name="google.datastore.admin.v1.CommonMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.datastore.admin.v1.CommonMetadata.start_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.datastore.admin.v1.CommonMetadata.end_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="operation_type", - full_name="google.datastore.admin.v1.CommonMetadata.operation_type", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.datastore.admin.v1.CommonMetadata.labels", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.datastore.admin.v1.CommonMetadata.state", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_COMMONMETADATA_LABELSENTRY,], - enum_types=[_COMMONMETADATA_STATE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=300, - serialized_end=800, -) - - -_PROGRESS = _descriptor.Descriptor( - name="Progress", - full_name="google.datastore.admin.v1.Progress", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="work_completed", - full_name="google.datastore.admin.v1.Progress.work_completed", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="work_estimated", - full_name="google.datastore.admin.v1.Progress.work_estimated", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=802, - serialized_end=860, -) - - -_EXPORTENTITIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=613, - serialized_end=658, -) - -_EXPORTENTITIESREQUEST = _descriptor.Descriptor( - name="ExportEntitiesRequest", - full_name="google.datastore.admin.v1.ExportEntitiesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.labels", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entity_filter", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.entity_filter", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="output_url_prefix", - full_name="google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_EXPORTENTITIESREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=863, - serialized_end=1132, -) - - -_IMPORTENTITIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name="key", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=613, - serialized_end=658, -) - -_IMPORTENTITIESREQUEST = _descriptor.Descriptor( - name="ImportEntitiesRequest", - full_name="google.datastore.admin.v1.ImportEntitiesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.labels", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="input_url", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.input_url", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entity_filter", - full_name="google.datastore.admin.v1.ImportEntitiesRequest.entity_filter", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_IMPORTENTITIESREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1135, - serialized_end=1396, -) - - -_EXPORTENTITIESRESPONSE = _descriptor.Descriptor( - name="ExportEntitiesResponse", - 
full_name="google.datastore.admin.v1.ExportEntitiesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="output_url", - full_name="google.datastore.admin.v1.ExportEntitiesResponse.output_url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1398, - serialized_end=1442, -) - - -_EXPORTENTITIESMETADATA = _descriptor.Descriptor( - name="ExportEntitiesMetadata", - full_name="google.datastore.admin.v1.ExportEntitiesMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="common", - full_name="google.datastore.admin.v1.ExportEntitiesMetadata.common", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="progress_entities", - full_name="google.datastore.admin.v1.ExportEntitiesMetadata.progress_entities", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="progress_bytes", - full_name="google.datastore.admin.v1.ExportEntitiesMetadata.progress_bytes", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entity_filter", - full_name="google.datastore.admin.v1.ExportEntitiesMetadata.entity_filter", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="output_url_prefix", - full_name="google.datastore.admin.v1.ExportEntitiesMetadata.output_url_prefix", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=1445, - serialized_end=1744, -) - - -_IMPORTENTITIESMETADATA = _descriptor.Descriptor( - name="ImportEntitiesMetadata", - full_name="google.datastore.admin.v1.ImportEntitiesMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="common", - full_name="google.datastore.admin.v1.ImportEntitiesMetadata.common", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="progress_entities", - full_name="google.datastore.admin.v1.ImportEntitiesMetadata.progress_entities", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="progress_bytes", - full_name="google.datastore.admin.v1.ImportEntitiesMetadata.progress_bytes", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entity_filter", - full_name="google.datastore.admin.v1.ImportEntitiesMetadata.entity_filter", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="input_url", - full_name="google.datastore.admin.v1.ImportEntitiesMetadata.input_url", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1747, - serialized_end=2038, -) - - -_ENTITYFILTER = _descriptor.Descriptor( - name="EntityFilter", - full_name="google.datastore.admin.v1.EntityFilter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="kinds", - full_name="google.datastore.admin.v1.EntityFilter.kinds", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="namespace_ids", - 
full_name="google.datastore.admin.v1.EntityFilter.namespace_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2040, - serialized_end=2092, -) - - -_GETINDEXREQUEST = _descriptor.Descriptor( - name="GetIndexRequest", - full_name="google.datastore.admin.v1.GetIndexRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.datastore.admin.v1.GetIndexRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="index_id", - full_name="google.datastore.admin.v1.GetIndexRequest.index_id", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2094, - serialized_end=2149, -) - - -_LISTINDEXESREQUEST = _descriptor.Descriptor( - name="ListIndexesRequest", - full_name="google.datastore.admin.v1.ListIndexesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.datastore.admin.v1.ListIndexesRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.datastore.admin.v1.ListIndexesRequest.filter", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.datastore.admin.v1.ListIndexesRequest.page_size", - index=2, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="page_token", - full_name="google.datastore.admin.v1.ListIndexesRequest.page_token", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2151, - serialized_end=2246, -) - - -_LISTINDEXESRESPONSE = _descriptor.Descriptor( - name="ListIndexesResponse", - full_name="google.datastore.admin.v1.ListIndexesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="indexes", - full_name="google.datastore.admin.v1.ListIndexesResponse.indexes", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.datastore.admin.v1.ListIndexesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2248, - serialized_end=2345, -) - - -_INDEXOPERATIONMETADATA = _descriptor.Descriptor( - name="IndexOperationMetadata", - full_name="google.datastore.admin.v1.IndexOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="common", - full_name="google.datastore.admin.v1.IndexOperationMetadata.common", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="progress_entities", - full_name="google.datastore.admin.v1.IndexOperationMetadata.progress_entities", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="index_id", - full_name="google.datastore.admin.v1.IndexOperationMetadata.index_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2348, - serialized_end=2513, -) - -_COMMONMETADATA_LABELSENTRY.containing_type = _COMMONMETADATA -_COMMONMETADATA.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_COMMONMETADATA.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_COMMONMETADATA.fields_by_name["operation_type"].enum_type = _OPERATIONTYPE -_COMMONMETADATA.fields_by_name["labels"].message_type = _COMMONMETADATA_LABELSENTRY -_COMMONMETADATA.fields_by_name["state"].enum_type = _COMMONMETADATA_STATE -_COMMONMETADATA_STATE.containing_type = _COMMONMETADATA -_EXPORTENTITIESREQUEST_LABELSENTRY.containing_type = _EXPORTENTITIESREQUEST -_EXPORTENTITIESREQUEST.fields_by_name[ - "labels" -].message_type = _EXPORTENTITIESREQUEST_LABELSENTRY -_EXPORTENTITIESREQUEST.fields_by_name["entity_filter"].message_type = _ENTITYFILTER -_IMPORTENTITIESREQUEST_LABELSENTRY.containing_type = _IMPORTENTITIESREQUEST -_IMPORTENTITIESREQUEST.fields_by_name[ - "labels" -].message_type = _IMPORTENTITIESREQUEST_LABELSENTRY -_IMPORTENTITIESREQUEST.fields_by_name["entity_filter"].message_type = _ENTITYFILTER -_EXPORTENTITIESMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA -_EXPORTENTITIESMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS -_EXPORTENTITIESMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS -_EXPORTENTITIESMETADATA.fields_by_name["entity_filter"].message_type = _ENTITYFILTER -_IMPORTENTITIESMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA -_IMPORTENTITIESMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS -_IMPORTENTITIESMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS -_IMPORTENTITIESMETADATA.fields_by_name["entity_filter"].message_type = _ENTITYFILTER -_LISTINDEXESRESPONSE.fields_by_name[ - "indexes" -].message_type = ( - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2._INDEX -) -_INDEXOPERATIONMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA -_INDEXOPERATIONMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS -DESCRIPTOR.message_types_by_name["CommonMetadata"] = _COMMONMETADATA -DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS -DESCRIPTOR.message_types_by_name["ExportEntitiesRequest"] = _EXPORTENTITIESREQUEST -DESCRIPTOR.message_types_by_name["ImportEntitiesRequest"] = _IMPORTENTITIESREQUEST -DESCRIPTOR.message_types_by_name["ExportEntitiesResponse"] = _EXPORTENTITIESRESPONSE -DESCRIPTOR.message_types_by_name["ExportEntitiesMetadata"] = _EXPORTENTITIESMETADATA -DESCRIPTOR.message_types_by_name["ImportEntitiesMetadata"] = _IMPORTENTITIESMETADATA -DESCRIPTOR.message_types_by_name["EntityFilter"] = _ENTITYFILTER -DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST -DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST -DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE -DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA -DESCRIPTOR.enum_types_by_name["OperationType"] = _OPERATIONTYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CommonMetadata = _reflection.GeneratedProtocolMessageType( - "CommonMetadata", - 
(_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _COMMONMETADATA_LABELSENTRY, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.CommonMetadata.LabelsEntry) - }, - ), - "DESCRIPTOR": _COMMONMETADATA, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """Metadata common to all Datastore Admin operations. - - Attributes: - start_time: - The time that work began on the operation. - end_time: - The time the operation ended, either successfully or - otherwise. - operation_type: - The type of the operation. Can be used as a filter in - ListOperationsRequest. - labels: - The client-assigned labels which were provided when the - operation was created. May also include additional labels. - state: - The current state of the Operation. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.CommonMetadata) - }, -) -_sym_db.RegisterMessage(CommonMetadata) -_sym_db.RegisterMessage(CommonMetadata.LabelsEntry) - -Progress = _reflection.GeneratedProtocolMessageType( - "Progress", - (_message.Message,), - { - "DESCRIPTOR": _PROGRESS, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """Measures the progress of a particular metric. - - Attributes: - work_completed: - The amount of work that has been completed. Note that this may - be greater than work_estimated. - work_estimated: - An estimate of how much work needs to be performed. May be - zero if the work estimate is unavailable. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Progress) - }, -) -_sym_db.RegisterMessage(Progress) - -ExportEntitiesRequest = _reflection.GeneratedProtocolMessageType( - "ExportEntitiesRequest", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _EXPORTENTITIESREQUEST_LABELSENTRY, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry) - }, - ), - "DESCRIPTOR": _EXPORTENTITIESREQUEST, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntiti - es][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - - Attributes: - project_id: - Required. Project ID against which to make the request. - labels: - Client-assigned labels. - entity_filter: - Description of what data from the project is included in the - export. - output_url_prefix: - Required. Location for the export metadata and data files. - The full resource URL of the external storage location. - Currently, only Google Cloud Storage is supported. So - output_url_prefix should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` - is the name of the Cloud Storage bucket and ``NAMESPACE_PATH`` - is an optional Cloud Storage namespace path (this is not a - Cloud Datastore namespace). For more information about Cloud - Storage namespace paths, see `Object name considerations - `__. The resulting files will be nested deeper - than the specified URL prefix. 
The final output URL will be - provided in the [google.datastore.admin.v1.ExportEntitiesRespo - nse.output_url][google.datastore.admin.v1.ExportEntitiesRespon - se.output_url] field. That value should be used for subsequent - ImportEntities operations. By nesting the data files deeper, - the same Cloud Storage bucket can be used in multiple - ExportEntities operations without conflict. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesRequest) - }, -) -_sym_db.RegisterMessage(ExportEntitiesRequest) -_sym_db.RegisterMessage(ExportEntitiesRequest.LabelsEntry) - -ImportEntitiesRequest = _reflection.GeneratedProtocolMessageType( - "ImportEntitiesRequest", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _IMPORTENTITIESREQUEST_LABELSENTRY, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry) - }, - ), - "DESCRIPTOR": _IMPORTENTITIESREQUEST, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntiti - es][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - - Attributes: - project_id: - Required. Project ID against which to make the request. - labels: - Client-assigned labels. - input_url: - Required. The full resource URL of the external storage - location. Currently, only Google Cloud Storage is supported. - So input_url should be of the form: ``gs://BUCKET_NAME[/NAMESP - ACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, where - ``BUCKET_NAME`` is the name of the Cloud Storage bucket, - ``NAMESPACE_PATH`` is an optional Cloud Storage namespace path - (this is not a Cloud Datastore namespace), and - ``OVERALL_EXPORT_METADATA_FILE`` is the metadata file written - by the ExportEntities operation. For more information about - Cloud Storage namespace paths, see `Object name considerations - `__. For more information, see [google.datasto - re.admin.v1.ExportEntitiesResponse.output_url][google.datastor - e.admin.v1.ExportEntitiesResponse.output_url]. - entity_filter: - Optionally specify which kinds/namespaces are to be imported. - If provided, the list must be a subset of the EntityFilter - used in creating the export, otherwise a FAILED_PRECONDITION - error will be returned. If no filter is specified then all - entities from the export are imported. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesRequest) - }, -) -_sym_db.RegisterMessage(ImportEntitiesRequest) -_sym_db.RegisterMessage(ImportEntitiesRequest.LabelsEntry) - -ExportEntitiesResponse = _reflection.GeneratedProtocolMessageType( - "ExportEntitiesResponse", - (_message.Message,), - { - "DESCRIPTOR": _EXPORTENTITIESRESPONSE, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """The response for [google.datastore.admin.v1.DatastoreAdmin.ExportEntit - ies][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - - Attributes: - output_url: - Location of the output metadata file. This can be used to - begin an import into Cloud Datastore (this project or another - project). See [google.datastore.admin.v1.ImportEntitiesRequest - .input_url][google.datastore.admin.v1.ImportEntitiesRequest.in - put_url]. Only present if the operation completed - successfully. 
- """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesResponse) - }, -) -_sym_db.RegisterMessage(ExportEntitiesResponse) - -ExportEntitiesMetadata = _reflection.GeneratedProtocolMessageType( - "ExportEntitiesMetadata", - (_message.Message,), - { - "DESCRIPTOR": _EXPORTENTITIESMETADATA, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """Metadata for ExportEntities operations. - - Attributes: - common: - Metadata common to all Datastore Admin operations. - progress_entities: - An estimate of the number of entities processed. - progress_bytes: - An estimate of the number of bytes processed. - entity_filter: - Description of which entities are being exported. - output_url_prefix: - Location for the export metadata and data files. This will be - the same value as the [google.datastore.admin.v1.ExportEntitie - sRequest.output_url_prefix][google.datastore.admin.v1.ExportEn - titiesRequest.output_url_prefix] field. The final output - location is provided in [google.datastore.admin.v1.ExportEntit - iesResponse.output_url][google.datastore.admin.v1.ExportEntiti - esResponse.output_url]. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesMetadata) - }, -) -_sym_db.RegisterMessage(ExportEntitiesMetadata) - -ImportEntitiesMetadata = _reflection.GeneratedProtocolMessageType( - "ImportEntitiesMetadata", - (_message.Message,), - { - "DESCRIPTOR": _IMPORTENTITIESMETADATA, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """Metadata for ImportEntities operations. - - Attributes: - common: - Metadata common to all Datastore Admin operations. - progress_entities: - An estimate of the number of entities processed. - progress_bytes: - An estimate of the number of bytes processed. - entity_filter: - Description of which entities are being imported. - input_url: - The location of the import metadata file. This will be the - same value as the [google.datastore.admin.v1.ExportEntitiesRes - ponse.output_url][google.datastore.admin.v1.ExportEntitiesResp - onse.output_url] field. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesMetadata) - }, -) -_sym_db.RegisterMessage(ImportEntitiesMetadata) - -EntityFilter = _reflection.GeneratedProtocolMessageType( - "EntityFilter", - (_message.Message,), - { - "DESCRIPTOR": _ENTITYFILTER, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """Identifies a subset of entities in a project. This is specified as - combinations of kinds and namespaces (either or both of which may be - all, as described in the following examples). Example usage: Entire - project: kinds=[], namespace_ids=[] Kinds Foo and Bar in all - namespaces: kinds=[‘Foo’, ‘Bar’], namespace_ids=[] Kinds Foo and Bar - only in the default namespace: kinds=[‘Foo’, ‘Bar’], - namespace_ids=[’’] Kinds Foo and Bar in both the default and Baz - namespaces: kinds=[‘Foo’, ‘Bar’], namespace_ids=[’‘, ’Baz’] The - entire Baz namespace: kinds=[], namespace_ids=[‘Baz’] - - Attributes: - kinds: - If empty, then this represents all kinds. - namespace_ids: - An empty list represents all namespaces. This is the preferred - usage for projects that don’t use namespaces. An empty string - element represents the default namespace. This should be used - if the project has data in non-default namespaces, but doesn’t - want to include them. Each namespace in this list must be - unique. 
- """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.EntityFilter) - }, -) -_sym_db.RegisterMessage(EntityFilter) - -GetIndexRequest = _reflection.GeneratedProtocolMessageType( - "GetIndexRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETINDEXREQUEST, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][go - ogle.datastore.admin.v1.DatastoreAdmin.GetIndex]. - - Attributes: - project_id: - Project ID against which to make the request. - index_id: - The resource ID of the index to get. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.GetIndexRequest) - }, -) -_sym_db.RegisterMessage(GetIndexRequest) - -ListIndexesRequest = _reflection.GeneratedProtocolMessageType( - "ListIndexesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTINDEXESREQUEST, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes] - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - - Attributes: - project_id: - Project ID against which to make the request. - page_size: - The maximum number of items to return. If zero, then all - results will be returned. - page_token: - The next_page_token value returned from a previous List - request, if any. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ListIndexesRequest) - }, -) -_sym_db.RegisterMessage(ListIndexesRequest) - -ListIndexesResponse = _reflection.GeneratedProtocolMessageType( - "ListIndexesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTINDEXESRESPONSE, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """The response for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes - ][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - - Attributes: - indexes: - The indexes. - next_page_token: - The standard List next-page token. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ListIndexesResponse) - }, -) -_sym_db.RegisterMessage(ListIndexesResponse) - -IndexOperationMetadata = _reflection.GeneratedProtocolMessageType( - "IndexOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _INDEXOPERATIONMETADATA, - "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", - "__doc__": """Metadata for Index operations. - - Attributes: - common: - Metadata common to all Datastore Admin operations. - progress_entities: - An estimate of the number of entities processed. - index_id: - The index resource ID that this operation is acting on. 
- """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.IndexOperationMetadata) - }, -) -_sym_db.RegisterMessage(IndexOperationMetadata) - - -DESCRIPTOR._options = None -_COMMONMETADATA_LABELSENTRY._options = None -_EXPORTENTITIESREQUEST_LABELSENTRY._options = None -_EXPORTENTITIESREQUEST.fields_by_name["project_id"]._options = None -_EXPORTENTITIESREQUEST.fields_by_name["output_url_prefix"]._options = None -_IMPORTENTITIESREQUEST_LABELSENTRY._options = None -_IMPORTENTITIESREQUEST.fields_by_name["project_id"]._options = None -_IMPORTENTITIESREQUEST.fields_by_name["input_url"]._options = None - -_DATASTOREADMIN = _descriptor.ServiceDescriptor( - name="DatastoreAdmin", - full_name="google.datastore.admin.v1.DatastoreAdmin", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\030datastore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore", - create_key=_descriptor._internal_create_key, - serialized_start=2643, - serialized_end=3567, - methods=[ - _descriptor.MethodDescriptor( - name="ExportEntities", - full_name="google.datastore.admin.v1.DatastoreAdmin.ExportEntities", - index=0, - containing_service=None, - input_type=_EXPORTENTITIESREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\002%" /v1/projects/{project_id}:export:\001*\332A1project_id,labels,entity_filter,output_url_prefix\312A0\n\026ExportEntitiesResponse\022\026ExportEntitiesMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ImportEntities", - full_name="google.datastore.admin.v1.DatastoreAdmin.ImportEntities", - index=1, - containing_service=None, - input_type=_IMPORTENTITIESREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\002%" /v1/projects/{project_id}:import:\001*\332A)project_id,labels,input_url,entity_filter\312A/\n\025google.protobuf.Empty\022\026ImportEntitiesMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetIndex", - full_name="google.datastore.admin.v1.DatastoreAdmin.GetIndex", - index=2, - containing_service=None, - input_type=_GETINDEXREQUEST, - output_type=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2._INDEX, - serialized_options=b"\202\323\344\223\002.\022,/v1/projects/{project_id}/indexes/{index_id}", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListIndexes", - full_name="google.datastore.admin.v1.DatastoreAdmin.ListIndexes", - index=3, - containing_service=None, - input_type=_LISTINDEXESREQUEST, - output_type=_LISTINDEXESRESPONSE, - serialized_options=b"\202\323\344\223\002#\022!/v1/projects/{project_id}/indexes", - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DATASTOREADMIN) - -DESCRIPTOR.services_by_name["DatastoreAdmin"] = _DATASTOREADMIN - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py deleted file mode 100644 index 177889e13306..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py +++ /dev/null @@ -1,414 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.datastore_admin_v1.proto import ( - datastore_admin_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2, -) -from google.cloud.datastore_admin_v1.proto import ( - index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) - - -class DatastoreAdminStub(object): - """Google Cloud Datastore Admin API - - - The Datastore Admin API provides several admin services for Cloud Datastore. - - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google Cloud Datastore - API. - - Operation: An Operation represents work being performed in the background. - - EntityFilter: Allows specifying a subset of entities in a project. This is - specified as a combination of kinds and namespaces (either or both of which - may be all). - - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a subset of - entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any Google Cloud - Platform project. It is not restricted to the export source project. It is - possible to export from one project and then import into another. - - Exported data can also be loaded into Google BigQuery for analysis. - - Exports and imports are performed asynchronously. An Operation resource is - created for each export/import. The state (including any errors encountered) - of the export/import may be queried via the Operation resource. - - # Index - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. - An Operation resource is created for each such asynchronous operation. - The state of the operation (including any errors encountered) - may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed for the - specified project (including any operations in progress). Operations are not - created directly but through calls on other collections or resources. - - An operation that is not yet done may be cancelled. The request to cancel is - asynchronous and the operation may continue to run for some time after the - request to cancel is made. - - An operation that is done may be deleted so that it is no longer listed as - part of the Operation collection. - - ListOperations returns all pending operations, but not completed operations. - - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ExportEntities = channel.unary_unary( - "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", - request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.ImportEntities = channel.unary_unary( - "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", - request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.GetIndex = channel.unary_unary( - "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", - request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, - ) - self.ListIndexes = channel.unary_unary( - "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", - request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, - ) - - -class DatastoreAdminServicer(object): - """Google Cloud Datastore Admin API - - - The Datastore Admin API provides several admin services for Cloud Datastore. - - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google Cloud Datastore - API. - - Operation: An Operation represents work being performed in the background. - - EntityFilter: Allows specifying a subset of entities in a project. This is - specified as a combination of kinds and namespaces (either or both of which - may be all). - - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a subset of - entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any Google Cloud - Platform project. It is not restricted to the export source project. It is - possible to export from one project and then import into another. - - Exported data can also be loaded into Google BigQuery for analysis. - - Exports and imports are performed asynchronously. An Operation resource is - created for each export/import. The state (including any errors encountered) - of the export/import may be queried via the Operation resource. - - # Index - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. - An Operation resource is created for each such asynchronous operation. - The state of the operation (including any errors encountered) - may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed for the - specified project (including any operations in progress). Operations are not - created directly but through calls on other collections or resources. - - An operation that is not yet done may be cancelled. The request to cancel is - asynchronous and the operation may continue to run for some time after the - request to cancel is made. 
- - An operation that is done may be deleted so that it is no longer listed as - part of the Operation collection. - - ListOperations returns all pending operations, but not completed operations. - - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. - """ - - def ExportEntities(self, request, context): - """Exports a copy of all or a subset of entities from Google Cloud Datastore - to another storage system, such as Google Cloud Storage. Recent updates to - entities may not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed via the - Operation resource that is created. The output of an export may only be - used once the associated operation is done. If an export operation is - cancelled before completion it may leave partial data behind in Google - Cloud Storage. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ImportEntities(self, request, context): - """Imports entities into Google Cloud Datastore. Existing entities with the - same key are overwritten. The import occurs in the background and its - progress can be monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it is possible - that a subset of the data has already been imported to Cloud Datastore. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetIndex(self, request, context): - """Gets an index. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListIndexes(self, request, context): - """Lists the indexes that match the specified filters. Datastore uses an - eventually consistent query to fetch the list of indexes and may - occasionally return stale results. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DatastoreAdminServicer_to_server(servicer, server): - rpc_method_handlers = { - "ExportEntities": grpc.unary_unary_rpc_method_handler( - servicer.ExportEntities, - request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "ImportEntities": grpc.unary_unary_rpc_method_handler( - servicer.ImportEntities, - request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "GetIndex": grpc.unary_unary_rpc_method_handler( - servicer.GetIndex, - request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.FromString, - response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.SerializeToString, - ), - "ListIndexes": grpc.unary_unary_rpc_method_handler( - servicer.ListIndexes, - request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.FromString, - response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.datastore.admin.v1.DatastoreAdmin", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class DatastoreAdmin(object): - """Google Cloud Datastore Admin API - - - The Datastore Admin API provides several admin services for Cloud Datastore. - - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google Cloud Datastore - API. - - Operation: An Operation represents work being performed in the background. - - EntityFilter: Allows specifying a subset of entities in a project. This is - specified as a combination of kinds and namespaces (either or both of which - may be all). - - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a subset of - entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any Google Cloud - Platform project. It is not restricted to the export source project. It is - possible to export from one project and then import into another. - - Exported data can also be loaded into Google BigQuery for analysis. - - Exports and imports are performed asynchronously. An Operation resource is - created for each export/import. The state (including any errors encountered) - of the export/import may be queried via the Operation resource. - - # Index - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. - An Operation resource is created for each such asynchronous operation. - The state of the operation (including any errors encountered) - may be queried via the Operation resource. 
- - # Operation - - The Operations collection provides a record of actions performed for the - specified project (including any operations in progress). Operations are not - created directly but through calls on other collections or resources. - - An operation that is not yet done may be cancelled. The request to cancel is - asynchronous and the operation may continue to run for some time after the - request to cancel is made. - - An operation that is done may be deleted so that it is no longer listed as - part of the Operation collection. - - ListOperations returns all pending operations, but not completed operations. - - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. - """ - - @staticmethod - def ExportEntities( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ImportEntities( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetIndex( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListIndexes( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, - google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py deleted file mode 100644 index 
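# A client-side sketch (assumed values, not part of this patch) for the
# generated stub defined above; the endpoint is hypothetical and a real channel
# would normally carry credentials rather than being insecure:
import grpc

from google.cloud.datastore_admin_v1.proto import (
    datastore_admin_pb2,
    datastore_admin_pb2_grpc,
)

channel = grpc.insecure_channel("localhost:8081")
stub = datastore_admin_pb2_grpc.DatastoreAdminStub(channel)
operation = stub.ExportEntities(
    datastore_admin_pb2.ExportEntitiesRequest(
        project_id="my-project", output_url_prefix="gs://my-backup-bucket"
    )
)  # returns a google.longrunning.Operation message that the caller polls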
c1ccb034c511..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2.py +++ /dev/null @@ -1,430 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datastore_admin_v1/proto/index.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datastore_admin_v1/proto/index.proto", - package="google.datastore.admin.v1", - syntax="proto3", - serialized_options=b"\n\035com.google.datastore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\252\002\037Google.Cloud.Datastore.Admin.V1\352\002#Google::Cloud::Datastore::Admin::V1", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n1google/cloud/datastore_admin_v1/proto/index.proto\x12\x19google.datastore.admin.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe6\x04\n\x05Index\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x15\n\x08index_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04kind\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x44\n\x08\x61ncestor\x18\x05 \x01(\x0e\x32-.google.datastore.admin.v1.Index.AncestorModeB\x03\xe0\x41\x02\x12I\n\nproperties\x18\x06 \x03(\x0b\x32\x30.google.datastore.admin.v1.Index.IndexedPropertyB\x03\xe0\x41\x02\x12:\n\x05state\x18\x07 \x01(\x0e\x32&.google.datastore.admin.v1.Index.StateB\x03\xe0\x41\x03\x1ah\n\x0fIndexedProperty\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x42\n\tdirection\x18\x02 \x01(\x0e\x32*.google.datastore.admin.v1.Index.DirectionB\x03\xe0\x41\x02"J\n\x0c\x41ncestorMode\x12\x1d\n\x19\x41NCESTOR_MODE_UNSPECIFIED\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x11\n\rALL_ANCESTORS\x10\x02"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"P\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08\x44\x45LETING\x10\x03\x12\t\n\x05\x45RROR\x10\x04\x42\xb5\x01\n\x1d\x63om.google.datastore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\xaa\x02\x1fGoogle.Cloud.Datastore.Admin.V1\xea\x02#Google::Cloud::Datastore::Admin::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_INDEX_ANCESTORMODE = _descriptor.EnumDescriptor( - name="AncestorMode", - full_name="google.datastore.admin.v1.Index.AncestorMode", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ANCESTOR_MODE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NONE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALL_ANCESTORS", - index=2, - number=2, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=531, - serialized_end=605, -) -_sym_db.RegisterEnumDescriptor(_INDEX_ANCESTORMODE) - -_INDEX_DIRECTION = _descriptor.EnumDescriptor( - name="Direction", - full_name="google.datastore.admin.v1.Index.Direction", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="DIRECTION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ASCENDING", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DESCENDING", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=607, - serialized_end=676, -) -_sym_db.RegisterEnumDescriptor(_INDEX_DIRECTION) - -_INDEX_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.datastore.admin.v1.Index.State", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CREATING", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="READY", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DELETING", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ERROR", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=678, - serialized_end=758, -) -_sym_db.RegisterEnumDescriptor(_INDEX_STATE) - - -_INDEX_INDEXEDPROPERTY = _descriptor.Descriptor( - name="IndexedProperty", - full_name="google.datastore.admin.v1.Index.IndexedProperty", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.datastore.admin.v1.Index.IndexedProperty.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="direction", - full_name="google.datastore.admin.v1.Index.IndexedProperty.direction", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - 
enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=425, - serialized_end=529, -) - -_INDEX = _descriptor.Descriptor( - name="Index", - full_name="google.datastore.admin.v1.Index", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.datastore.admin.v1.Index.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="index_id", - full_name="google.datastore.admin.v1.Index.index_id", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="kind", - full_name="google.datastore.admin.v1.Index.kind", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ancestor", - full_name="google.datastore.admin.v1.Index.ancestor", - index=3, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="properties", - full_name="google.datastore.admin.v1.Index.properties", - index=4, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.datastore.admin.v1.Index.state", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_INDEX_INDEXEDPROPERTY,], - enum_types=[_INDEX_ANCESTORMODE, _INDEX_DIRECTION, _INDEX_STATE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=144, - serialized_end=758, -) - -_INDEX_INDEXEDPROPERTY.fields_by_name["direction"].enum_type = _INDEX_DIRECTION -_INDEX_INDEXEDPROPERTY.containing_type = _INDEX -_INDEX.fields_by_name["ancestor"].enum_type = _INDEX_ANCESTORMODE -_INDEX.fields_by_name["properties"].message_type = _INDEX_INDEXEDPROPERTY 
-_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE -_INDEX_ANCESTORMODE.containing_type = _INDEX -_INDEX_DIRECTION.containing_type = _INDEX -_INDEX_STATE.containing_type = _INDEX -DESCRIPTOR.message_types_by_name["Index"] = _INDEX -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Index = _reflection.GeneratedProtocolMessageType( - "Index", - (_message.Message,), - { - "IndexedProperty": _reflection.GeneratedProtocolMessageType( - "IndexedProperty", - (_message.Message,), - { - "DESCRIPTOR": _INDEX_INDEXEDPROPERTY, - "__module__": "google.cloud.datastore_admin_v1.proto.index_pb2", - "__doc__": """A property of an index. - - Attributes: - name: - Required. The property name to index. - direction: - Required. The indexed property’s direction. Must not be - DIRECTION_UNSPECIFIED. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Index.IndexedProperty) - }, - ), - "DESCRIPTOR": _INDEX, - "__module__": "google.cloud.datastore_admin_v1.proto.index_pb2", - "__doc__": """A minimal index definition. - - Attributes: - project_id: - Output only. Project ID. - index_id: - Output only. The resource ID of the index. - kind: - Required. The entity kind to which this index applies. - ancestor: - Required. The index’s ancestor mode. Must not be - ANCESTOR_MODE_UNSPECIFIED. - properties: - Required. An ordered sequence of property names and their - index attributes. - state: - Output only. The state of the index. - """, - # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Index) - }, -) -_sym_db.RegisterMessage(Index) -_sym_db.RegisterMessage(Index.IndexedProperty) - - -DESCRIPTOR._options = None -_INDEX_INDEXEDPROPERTY.fields_by_name["name"]._options = None -_INDEX_INDEXEDPROPERTY.fields_by_name["direction"]._options = None -_INDEX.fields_by_name["project_id"]._options = None -_INDEX.fields_by_name["index_id"]._options = None -_INDEX.fields_by_name["kind"]._options = None -_INDEX.fields_by_name["ancestor"]._options = None -_INDEX.fields_by_name["properties"]._options = None -_INDEX.fields_by_name["state"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed new file mode 100644 index 000000000000..dc48a544f252 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datastore-admin package uses inline types. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py new file mode 100644 index 000000000000..42ffdf2bc43d --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py new file mode 100644 index 000000000000..a004406b5070 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import DatastoreAdminClient +from .async_client import DatastoreAdminAsyncClient + +__all__ = ( + "DatastoreAdminClient", + "DatastoreAdminAsyncClient", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py new file mode 100644 index 000000000000..fd9589b6d468 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -0,0 +1,564 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation +from google.api_core import operation_async +from google.cloud.datastore_admin_v1.services.datastore_admin import pagers +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.protobuf import empty_pb2 as empty # type: ignore + +from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport +from .client import DatastoreAdminClient + + +class DatastoreAdminAsyncClient: + """Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for + Cloud Datastore. + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google + Cloud Datastore API. + + Operation: An Operation represents work being performed in the + background. + EntityFilter: Allows specifying a subset of entities in a + project. This is specified as a combination of kinds and + namespaces (either or both of which may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a + subset of entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one project + and then import into another. + Exported data can also be loaded into Google BigQuery for + analysis. + Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state (including + any errors encountered) of the export/import may be queried via + the Operation resource. + # Index + + The index service manages Cloud Datastore composite indexes. + Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed + for the specified project (including any operations in + progress). Operations are not created directly but through calls + on other collections or resources. + An operation that is not yet done may be cancelled. The request + to cancel is asynchronous and the operation may continue to run + for some time after the request to cancel is made. + + An operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. + + ListOperations returns all pending operations, but not completed + operations. + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. 
+ """ + + _client: DatastoreAdminClient + + DEFAULT_ENDPOINT = DatastoreAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + + from_service_account_file = DatastoreAdminClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DatastoreAdminClient).get_transport_class, type(DatastoreAdminClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the datastore admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DatastoreAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = DatastoreAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def export_entities( + self, + request: datastore_admin.ExportEntitiesRequest = None, + *, + project_id: str = None, + labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, + entity_filter: datastore_admin.EntityFilter = None, + output_url_prefix: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports a copy of all or a subset of entities from + Google Cloud Datastore to another storage system, such + as Google Cloud Storage. Recent updates to entities may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + Args: + request (:class:`~.datastore_admin.ExportEntitiesRequest`): + The request object. 
The request for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + project_id (:class:`str`): + Required. Project ID against which to + make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`Sequence[~.datastore_admin.ExportEntitiesRequest.LabelsEntry]`): + Client-assigned labels. + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_filter (:class:`~.datastore_admin.EntityFilter`): + Description of what data from the + project is included in the export. + This corresponds to the ``entity_filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_url_prefix (:class:`str`): + Required. Location for the export metadata and data + files. + + The full resource URL of the external storage location. + Currently, only Google Cloud Storage is supported. So + output_url_prefix should be of the form: + ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where + ``BUCKET_NAME`` is the name of the Cloud Storage bucket + and ``NAMESPACE_PATH`` is an optional Cloud Storage + namespace path (this is not a Cloud Datastore + namespace). For more information about Cloud Storage + namespace paths, see `Object name + considerations `__. + + The resulting files will be nested deeper than the + specified URL prefix. The final output URL will be + provided in the + [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] + field. That value should be used for subsequent + ImportEntities operations. + + By nesting the data files deeper, the same Cloud Storage + bucket can be used in multiple ExportEntities operations + without conflict. + This corresponds to the ``output_url_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.datastore_admin.ExportEntitiesResponse``: The + response for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any( + [project_id, labels, entity_filter, output_url_prefix] + ): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore_admin.ExportEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
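+        # (The flattened arguments take precedence: they are written onto the
+        # coerced ``request`` object below, overwriting any values the caller's
+        # object already carried for those fields.)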
+ + if project_id is not None: + request.project_id = project_id + if labels is not None: + request.labels = labels + if entity_filter is not None: + request.entity_filter = entity_filter + if output_url_prefix is not None: + request.output_url_prefix = output_url_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_entities, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datastore_admin.ExportEntitiesResponse, + metadata_type=datastore_admin.ExportEntitiesMetadata, + ) + + # Done; return the response. + return response + + async def import_entities( + self, + request: datastore_admin.ImportEntitiesRequest = None, + *, + project_id: str = None, + labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, + input_url: str = None, + entity_filter: datastore_admin.EntityFilter = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports entities into Google Cloud Datastore. + Existing entities with the same key are overwritten. The + import occurs in the background and its progress can be + monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it + is possible that a subset of the data has already been + imported to Cloud Datastore. + + Args: + request (:class:`~.datastore_admin.ImportEntitiesRequest`): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. + project_id (:class:`str`): + Required. Project ID against which to + make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]`): + Client-assigned labels. + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_url (:class:`str`): + Required. The full resource URL of the external storage + location. Currently, only Google Cloud Storage is + supported. So input_url should be of the form: + ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, + where ``BUCKET_NAME`` is the name of the Cloud Storage + bucket, ``NAMESPACE_PATH`` is an optional Cloud Storage + namespace path (this is not a Cloud Datastore + namespace), and ``OVERALL_EXPORT_METADATA_FILE`` is the + metadata file written by the ExportEntities operation. + For more information about Cloud Storage namespace + paths, see `Object name + considerations `__. + + For more information, see + [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. + This corresponds to the ``input_url`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_filter (:class:`~.datastore_admin.EntityFilter`): + Optionally specify which kinds/namespaces are to be + imported. 
If provided, the list must be a subset of the + EntityFilter used in creating the export, otherwise a + FAILED_PRECONDITION error will be returned. If no filter + is specified then all entities from the export are + imported. + This corresponds to the ``entity_filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.empty.Empty``: A generic empty message that + you can re-use to avoid defining duplicated empty + messages in your APIs. A typical example is to use it as + the request or the response type of an API method. For + instance: + + :: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + + The JSON representation for ``Empty`` is empty JSON + object ``{}``. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project_id, labels, input_url, entity_filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore_admin.ImportEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if labels is not None: + request.labels = labels + if input_url is not None: + request.input_url = input_url + if entity_filter is not None: + request.entity_filter = entity_filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_entities, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty.Empty, + metadata_type=datastore_admin.ImportEntitiesMetadata, + ) + + # Done; return the response. + return response + + async def get_index( + self, + request: datastore_admin.GetIndexRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> index.Index: + r"""Gets an index. + + Args: + request (:class:`~.datastore_admin.GetIndexRequest`): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.index.Index: + A minimal index definition. + """ + # Create or coerce a protobuf request object. 
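+        # (Unlike export_entities and import_entities above, get_index exposes
+        # no flattened fields, so the request is coerced without a keyword
+        # guard.)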
+ + request = datastore_admin.GetIndexRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_index, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_indexes( + self, + request: datastore_admin.ListIndexesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIndexesAsyncPager: + r"""Lists the indexes that match the specified filters. + Datastore uses an eventually consistent query to fetch + the list of indexes and may occasionally return stale + results. + + Args: + request (:class:`~.datastore_admin.ListIndexesRequest`): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListIndexesAsyncPager: + The response for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + + request = datastore_admin.ListIndexesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_indexes, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListIndexesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore-admin", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DatastoreAdminAsyncClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py new file mode 100644 index 000000000000..0ebed21e0971 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -0,0 +1,700 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation +from google.api_core import operation_async +from google.cloud.datastore_admin_v1.services.datastore_admin import pagers +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.protobuf import empty_pb2 as empty # type: ignore + +from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DatastoreAdminGrpcTransport +from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport + + +class DatastoreAdminClientMeta(type): + """Metaclass for the DatastoreAdmin client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DatastoreAdminTransport]] + _transport_registry["grpc"] = DatastoreAdminGrpcTransport + _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[DatastoreAdminTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
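+        # ("label" is one of the registry keys populated above: "grpc" for the
+        # synchronous transport or "grpc_asyncio" for the asyncio transport.)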
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta):
+    """Google Cloud Datastore Admin API
+    The Datastore Admin API provides several admin services for
+    Cloud Datastore.
+    -----------------------------------------------------------------------------
+    ## Concepts
+
+    Project, namespace, kind, and entity as defined in the Google
+    Cloud Datastore API.
+
+    Operation: An Operation represents work being performed in the
+    background.
+    EntityFilter: Allows specifying a subset of entities in a
+    project. This is specified as a combination of kinds and
+    namespaces (either or both of which may be all).
+
+    ## Services
+
+    # Export/Import
+
+    The Export/Import service provides the ability to copy all or a
+    subset of entities to/from Google Cloud Storage.
+
+    Exported data may be imported into Cloud Datastore for any
+    Google Cloud Platform project. It is not restricted to the
+    export source project. It is possible to export from one project
+    and then import into another.
+    Exported data can also be loaded into Google BigQuery for
+    analysis.
+    Exports and imports are performed asynchronously. An Operation
+    resource is created for each export/import. The state (including
+    any errors encountered) of the export/import may be queried via
+    the Operation resource.
+    # Index
+
+    The index service manages Cloud Datastore composite indexes.
+    Index creation and deletion are performed asynchronously. An
+    Operation resource is created for each such asynchronous
+    operation. The state of the operation (including any errors
+    encountered) may be queried via the Operation resource.
+
+    # Operation
+
+    The Operations collection provides a record of actions performed
+    for the specified project (including any operations in
+    progress). Operations are not created directly but through calls
+    on other collections or resources.
+    An operation that is not yet done may be cancelled. The request
+    to cancel is asynchronous and the operation may continue to run
+    for some time after the request to cancel is made.
+
+    An operation that is done may be deleted so that it is no longer
+    listed as part of the Operation collection.
+
+    ListOperations returns all pending operations, but not completed
+    operations.
+    Operations are created by service DatastoreAdmin,
+    but are accessed via service google.longrunning.Operations.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datastore.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DatastoreAdminClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, DatastoreAdminTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the datastore admin client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.DatastoreAdminTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = ClientOptions.from_dict(client_options)
+        if client_options is None:
+            client_options = ClientOptions.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
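As a checkpoint on the `_get_default_mtls_endpoint` helper defined earlier in this class, a few input/output pairs (a sketch; the helper is private and is poked here only for illustration):

    convert = DatastoreAdminClient._get_default_mtls_endpoint
    assert convert("datastore.googleapis.com") == "datastore.mtls.googleapis.com"
    assert convert("datastore.sandbox.googleapis.com") == (
        "datastore.mtls.sandbox.googleapis.com"
    )
    assert convert("datastore.mtls.googleapis.com") == "datastore.mtls.googleapis.com"
    assert convert("example.com") == "example.com"  # non-Google hosts pass through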
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DatastoreAdminTransport): + # transport is a DatastoreAdminTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def export_entities( + self, + request: datastore_admin.ExportEntitiesRequest = None, + *, + project_id: str = None, + labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, + entity_filter: datastore_admin.EntityFilter = None, + output_url_prefix: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports a copy of all or a subset of entities from + Google Cloud Datastore to another storage system, such + as Google Cloud Storage. Recent updates to entities may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + Args: + request (:class:`~.datastore_admin.ExportEntitiesRequest`): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + project_id (:class:`str`): + Required. Project ID against which to + make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            labels (:class:`Sequence[~.datastore_admin.ExportEntitiesRequest.LabelsEntry]`):
+                Client-assigned labels.
+                This corresponds to the ``labels`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            entity_filter (:class:`~.datastore_admin.EntityFilter`):
+                Description of what data from the
+                project is included in the export.
+                This corresponds to the ``entity_filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            output_url_prefix (:class:`str`):
+                Required. Location for the export metadata and data
+                files.
+
+                The full resource URL of the external storage location.
+                Currently, only Google Cloud Storage is supported. So
+                output_url_prefix should be of the form:
+                ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where
+                ``BUCKET_NAME`` is the name of the Cloud Storage bucket
+                and ``NAMESPACE_PATH`` is an optional Cloud Storage
+                namespace path (this is not a Cloud Datastore
+                namespace). For more information about Cloud Storage
+                namespace paths, see `Object name
+                considerations <https://cloud.google.com/storage/docs/naming#object-considerations>`__.
+
+                The resulting files will be nested deeper than the
+                specified URL prefix. The final output URL will be
+                provided in the
+                [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]
+                field. That value should be used for subsequent
+                ImportEntities operations.
+
+                By nesting the data files deeper, the same Cloud Storage
+                bucket can be used in multiple ExportEntities operations
+                without conflict.
+                This corresponds to the ``output_url_prefix`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`~.datastore_admin.ExportEntitiesResponse`: The
+                response for
+                [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities].
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any(
+            [project_id, labels, entity_filter, output_url_prefix]
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a datastore_admin.ExportEntitiesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, datastore_admin.ExportEntitiesRequest):
+            request = datastore_admin.ExportEntitiesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if project_id is not None:
+            request.project_id = project_id
+        if labels is not None:
+            request.labels = labels
+        if entity_filter is not None:
+            request.entity_filter = entity_filter
+        if output_url_prefix is not None:
+            request.output_url_prefix = output_url_prefix
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
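End to end, the flattened-argument path above can be exercised as follows (a sketch: the project and bucket names are placeholders, default application credentials are assumed, and the client is assumed to be re-exported from the versioned package's top level):

    from google.cloud import datastore_admin_v1

    client = datastore_admin_v1.DatastoreAdminClient()
    op = client.export_entities(
        project_id="my-project",
        output_url_prefix="gs://my-bucket/datastore-export",
    )
    result = op.result()  # blocks until the long-running operation completes
    print(result.output_url)  # usable later as ImportEntities input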
+        rpc = self._transport._wrapped_methods[self._transport.export_entities]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            datastore_admin.ExportEntitiesResponse,
+            metadata_type=datastore_admin.ExportEntitiesMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def import_entities(
+        self,
+        request: datastore_admin.ImportEntitiesRequest = None,
+        *,
+        project_id: str = None,
+        labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None,
+        input_url: str = None,
+        entity_filter: datastore_admin.EntityFilter = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Imports entities into Google Cloud Datastore.
+        Existing entities with the same key are overwritten. The
+        import occurs in the background and its progress can be
+        monitored and managed via the Operation resource that is
+        created. If an ImportEntities operation is cancelled, it
+        is possible that a subset of the data has already been
+        imported to Cloud Datastore.
+
+        Args:
+            request (:class:`~.datastore_admin.ImportEntitiesRequest`):
+                The request object. The request for
+                [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities].
+            project_id (:class:`str`):
+                Required. Project ID against which to
+                make the request.
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            labels (:class:`Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]`):
+                Client-assigned labels.
+                This corresponds to the ``labels`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            input_url (:class:`str`):
+                Required. The full resource URL of the external storage
+                location. Currently, only Google Cloud Storage is
+                supported. So input_url should be of the form:
+                ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``,
+                where ``BUCKET_NAME`` is the name of the Cloud Storage
+                bucket, ``NAMESPACE_PATH`` is an optional Cloud Storage
+                namespace path (this is not a Cloud Datastore
+                namespace), and ``OVERALL_EXPORT_METADATA_FILE`` is the
+                metadata file written by the ExportEntities operation.
+                For more information about Cloud Storage namespace
+                paths, see `Object name
+                considerations <https://cloud.google.com/storage/docs/naming#object-considerations>`__.
+
+                For more information, see
+                [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+                This corresponds to the ``input_url`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            entity_filter (:class:`~.datastore_admin.EntityFilter`):
+                Optionally specify which kinds/namespaces are to be
+                imported. If provided, the list must be a subset of the
+                EntityFilter used in creating the export, otherwise a
+                FAILED_PRECONDITION error will be returned. If no filter
+                is specified then all entities from the export are
+                imported.
+                This corresponds to the ``entity_filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.empty.Empty``: A generic empty message that + you can re-use to avoid defining duplicated empty + messages in your APIs. A typical example is to use it as + the request or the response type of an API method. For + instance: + + :: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + + The JSON representation for ``Empty`` is empty JSON + object ``{}``. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, labels, input_url, entity_filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datastore_admin.ImportEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore_admin.ImportEntitiesRequest): + request = datastore_admin.ImportEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if labels is not None: + request.labels = labels + if input_url is not None: + request.input_url = input_url + if entity_filter is not None: + request.entity_filter = entity_filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_entities] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty.Empty, + metadata_type=datastore_admin.ImportEntitiesMetadata, + ) + + # Done; return the response. + return response + + def get_index( + self, + request: datastore_admin.GetIndexRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> index.Index: + r"""Gets an index. + + Args: + request (:class:`~.datastore_admin.GetIndexRequest`): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.index.Index: + A minimal index definition. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a datastore_admin.GetIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore_admin.GetIndexRequest): + request = datastore_admin.GetIndexRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
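Because of the coercion just above, ``request`` may be either a prebuilt proto-plus message or a plain mapping; both forms below are equivalent (a sketch with placeholder identifiers, reusing a previously constructed client):

    from google.cloud.datastore_admin_v1.types import datastore_admin

    idx = client.get_index(
        request={"project_id": "my-project", "index_id": "ZXhhbXBsZQ"}
    )

    req = datastore_admin.GetIndexRequest(
        project_id="my-project", index_id="ZXhhbXBsZQ"
    )
    idx = client.get_index(request=req)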
+ rpc = self._transport._wrapped_methods[self._transport.get_index] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_indexes( + self, + request: datastore_admin.ListIndexesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIndexesPager: + r"""Lists the indexes that match the specified filters. + Datastore uses an eventually consistent query to fetch + the list of indexes and may occasionally return stale + results. + + Args: + request (:class:`~.datastore_admin.ListIndexesRequest`): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListIndexesPager: + The response for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a datastore_admin.ListIndexesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore_admin.ListIndexesRequest): + request = datastore_admin.ListIndexesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_indexes] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListIndexesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore-admin", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DatastoreAdminClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py new file mode 100644 index 000000000000..7c176fce7a25 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index + + +class ListIndexesPager: + """A pager for iterating through ``list_indexes`` requests. + + This class thinly wraps an initial + :class:`~.datastore_admin.ListIndexesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``indexes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListIndexes`` requests and continue to iterate + through the ``indexes`` field on the + corresponding responses. + + All the usual :class:`~.datastore_admin.ListIndexesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datastore_admin.ListIndexesResponse], + request: datastore_admin.ListIndexesRequest, + response: datastore_admin.ListIndexesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datastore_admin.ListIndexesRequest`): + The initial request object. + response (:class:`~.datastore_admin.ListIndexesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datastore_admin.ListIndexesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[datastore_admin.ListIndexesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[index.Index]: + for page in self.pages: + yield from page.indexes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListIndexesAsyncPager: + """A pager for iterating through ``list_indexes`` requests. + + This class thinly wraps an initial + :class:`~.datastore_admin.ListIndexesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``indexes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListIndexes`` requests and continue to iterate + through the ``indexes`` field on the + corresponding responses. + + All the usual :class:`~.datastore_admin.ListIndexesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datastore_admin.ListIndexesResponse]], + request: datastore_admin.ListIndexesRequest, + response: datastore_admin.ListIndexesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.datastore_admin.ListIndexesRequest`): + The initial request object. + response (:class:`~.datastore_admin.ListIndexesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datastore_admin.ListIndexesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[datastore_admin.ListIndexesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[index.Index]: + async def async_generator(): + async for page in self.pages: + for response in page.indexes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py new file mode 100644 index 000000000000..41b72bc31b4f --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DatastoreAdminTransport +from .grpc import DatastoreAdminGrpcTransport +from .grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreAdminTransport]] +_transport_registry["grpc"] = DatastoreAdminGrpcTransport +_transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport + + +__all__ = ( + "DatastoreAdminTransport", + "DatastoreAdminGrpcTransport", + "DatastoreAdminGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py new file mode 100644 index 000000000000..6049d54623fd --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.datastore_admin_v1.types import datastore_admin
+from google.cloud.datastore_admin_v1.types import index
+from google.longrunning import operations_pb2 as operations  # type: ignore
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-datastore-admin",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class DatastoreAdminTransport(abc.ABC):
+    """Abstract transport class for DatastoreAdmin."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/datastore",
+    )
+
+    def __init__(
+        self,
+        *,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
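One consequence of the checks above: explicit ``credentials`` and a ``credentials_file`` cannot be combined on any concrete transport (a sketch; ``my_credentials`` and the file path are placeholders):

    from google.api_core import exceptions
    from google.cloud.datastore_admin_v1.services.datastore_admin.transports.grpc import (
        DatastoreAdminGrpcTransport,
    )

    try:
        DatastoreAdminGrpcTransport(
            credentials=my_credentials,  # placeholder: an already-loaded Credentials
            credentials_file="service-account.json",  # placeholder path
        )
    except exceptions.DuplicateCredentialArgs:
        print("pass credentials or credentials_file, never both")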
+ self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.export_entities: gapic_v1.method.wrap_method( + self.export_entities, default_timeout=60.0, client_info=client_info, + ), + self.import_entities: gapic_v1.method.wrap_method( + self.import_entities, default_timeout=60.0, client_info=client_info, + ), + self.get_index: gapic_v1.method.wrap_method( + self.get_index, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_indexes: gapic_v1.method.wrap_method( + self.list_indexes, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def export_entities( + self, + ) -> typing.Callable[ + [datastore_admin.ExportEntitiesRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def import_entities( + self, + ) -> typing.Callable[ + [datastore_admin.ImportEntitiesRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def get_index( + self, + ) -> typing.Callable[ + [datastore_admin.GetIndexRequest], + typing.Union[index.Index, typing.Awaitable[index.Index]], + ]: + raise NotImplementedError() + + @property + def list_indexes( + self, + ) -> typing.Callable[ + [datastore_admin.ListIndexesRequest], + typing.Union[ + datastore_admin.ListIndexesResponse, + typing.Awaitable[datastore_admin.ListIndexesResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("DatastoreAdminTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py new file mode 100644 index 000000000000..b478b75ad331 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
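For reference, the retry policy wired into ``_prep_wrapped_methods`` above is ordinary ``google.api_core`` machinery; reproduced standalone (same values as the defaults above) it reads:

    from google.api_core import exceptions
    from google.api_core import retry as retries

    # Exponential backoff: first wait 0.1 s, grow by 1.3x, cap at 60 s,
    # and retry only the transient error types.
    default_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            exceptions.ServiceUnavailable,
            exceptions.DeadlineExceeded,
        ),
    )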
+# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 as operations # type: ignore + +from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO + + +class DatastoreAdminGrpcTransport(DatastoreAdminTransport): + """gRPC backend transport for DatastoreAdmin. + + Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for + Cloud Datastore. + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google + Cloud Datastore API. + + Operation: An Operation represents work being performed in the + background. + EntityFilter: Allows specifying a subset of entities in a + project. This is specified as a combination of kinds and + namespaces (either or both of which may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a + subset of entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one project + and then import into another. + Exported data can also be loaded into Google BigQuery for + analysis. + Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state (including + any errors encountered) of the export/import may be queried via + the Operation resource. + # Index + + The index service manages Cloud Datastore composite indexes. + Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed + for the specified project (including any operations in + progress). Operations are not created directly but through calls + on other collections or resources. + An operation that is not yet done may be cancelled. The request + to cancel is asynchronous and the operation may continue to run + for some time after the request to cancel is made. + + An operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. + + ListOperations returns all pending operations, but not completed + operations. + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
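The ``channel`` short-circuit documented above is the usual hook for emulators and tests; a sketch (the localhost address is a placeholder, and an insecure channel is only appropriate for local targets):

    import grpc

    from google.cloud.datastore_admin_v1.services.datastore_admin import (
        DatastoreAdminClient,
    )
    from google.cloud.datastore_admin_v1.services.datastore_admin.transports.grpc import (
        DatastoreAdminGrpcTransport,
    )

    channel = grpc.insecure_channel("localhost:8081")  # placeholder target
    transport = DatastoreAdminGrpcTransport(channel=channel)
    client = DatastoreAdminClient(transport=transport)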
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Sanity check: Only create a new client if we do not already have one.
+        if "operations_client" not in self.__dict__:
+            self.__dict__["operations_client"] = operations_v1.OperationsClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+ return self.__dict__["operations_client"] + + @property + def export_entities( + self, + ) -> Callable[[datastore_admin.ExportEntitiesRequest], operations.Operation]: + r"""Return a callable for the export entities method over gRPC. + + Exports a copy of all or a subset of entities from + Google Cloud Datastore to another storage system, such + as Google Cloud Storage. Recent updates to entities may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + Returns: + Callable[[~.ExportEntitiesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_entities" not in self._stubs: + self._stubs["export_entities"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + request_serializer=datastore_admin.ExportEntitiesRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["export_entities"] + + @property + def import_entities( + self, + ) -> Callable[[datastore_admin.ImportEntitiesRequest], operations.Operation]: + r"""Return a callable for the import entities method over gRPC. + + Imports entities into Google Cloud Datastore. + Existing entities with the same key are overwritten. The + import occurs in the background and its progress can be + monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it + is possible that a subset of the data has already been + imported to Cloud Datastore. + + Returns: + Callable[[~.ImportEntitiesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_entities" not in self._stubs: + self._stubs["import_entities"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + request_serializer=datastore_admin.ImportEntitiesRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["import_entities"] + + @property + def get_index(self) -> Callable[[datastore_admin.GetIndexRequest], index.Index]: + r"""Return a callable for the get index method over gRPC. + + Gets an index. + + Returns: + Callable[[~.GetIndexRequest], + ~.Index]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
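Stripped of the gRPC details, each stub property above is the same memoization pattern; a generic sketch (``make_stub`` is a hypothetical factory standing in for ``self.grpc_channel.unary_unary``):

    class LazyStubs:
        def __init__(self, make_stub):
            self._make_stub = make_stub  # hypothetical factory
            self._stubs = {}

        @property
        def get_index(self):
            # Build the callable on first access, then reuse the cached one.
            if "get_index" not in self._stubs:
                self._stubs["get_index"] = self._make_stub("GetIndex")
            return self._stubs["get_index"]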
+ if "get_index" not in self._stubs: + self._stubs["get_index"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + request_serializer=datastore_admin.GetIndexRequest.serialize, + response_deserializer=index.Index.deserialize, + ) + return self._stubs["get_index"] + + @property + def list_indexes( + self, + ) -> Callable[ + [datastore_admin.ListIndexesRequest], datastore_admin.ListIndexesResponse + ]: + r"""Return a callable for the list indexes method over gRPC. + + Lists the indexes that match the specified filters. + Datastore uses an eventually consistent query to fetch + the list of indexes and may occasionally return stale + results. + + Returns: + Callable[[~.ListIndexesRequest], + ~.ListIndexesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_indexes" not in self._stubs: + self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + request_serializer=datastore_admin.ListIndexesRequest.serialize, + response_deserializer=datastore_admin.ListIndexesResponse.deserialize, + ) + return self._stubs["list_indexes"] + + +__all__ = ("DatastoreAdminGrpcTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f80c7da93006 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 as operations # type: ignore + +from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO +from .grpc import DatastoreAdminGrpcTransport + + +class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): + """gRPC AsyncIO backend transport for DatastoreAdmin. 
+
+    Google Cloud Datastore Admin API
+    The Datastore Admin API provides several admin services for
+    Cloud Datastore.
+    -----------------------------------------------------------------------------
+    ## Concepts
+
+    Project, namespace, kind, and entity as defined in the Google
+    Cloud Datastore API.
+
+    Operation: An Operation represents work being performed in the
+    background.
+    EntityFilter: Allows specifying a subset of entities in a
+    project. This is specified as a combination of kinds and
+    namespaces (either or both of which may be all).
+
+    -----------------------------------------------------------------------------
+    ## Services
+
+    # Export/Import
+
+    The Export/Import service provides the ability to copy all or a
+    subset of entities to/from Google Cloud Storage.
+
+    Exported data may be imported into Cloud Datastore for any
+    Google Cloud Platform project. It is not restricted to the
+    export source project. It is possible to export from one project
+    and then import into another.
+    Exported data can also be loaded into Google BigQuery for
+    analysis.
+    Exports and imports are performed asynchronously. An Operation
+    resource is created for each export/import. The state (including
+    any errors encountered) of the export/import may be queried via
+    the Operation resource.
+    # Index
+
+    The index service manages Cloud Datastore composite indexes.
+    Index creation and deletion are performed asynchronously. An
+    Operation resource is created for each such asynchronous
+    operation. The state of the operation (including any errors
+    encountered) may be queried via the Operation resource.
+
+    # Operation
+
+    The Operations collection provides a record of actions performed
+    for the specified project (including any operations in
+    progress). Operations are not created directly but through calls
+    on other collections or resources.
+    An operation that is not yet done may be cancelled. The request
+    to cancel is asynchronous and the operation may continue to run
+    for some time after the request to cancel is made.
+
+    An operation that is done may be deleted so that it is no longer
+    listed as part of the Operation collection.
+
+    ListOperations returns all pending operations, but not completed
+    operations.
+    Operations are created by service DatastoreAdmin,
+    but are accessed via service google.longrunning.Operations.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
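+        # Illustrative note (a sketch, not generated code): callers who need
+        # full control over channel options can build the channel themselves
+        # and pass it in, e.g.:
+        #
+        #     channel = DatastoreAdminGrpcAsyncIOTransport.create_channel(
+        #         "datastore.googleapis.com:443"
+        #     )
+        #     transport = DatastoreAdminGrpcAsyncIOTransport(channel=channel)
+        #
+        # When a channel is supplied, the branches below skip credential and
+        # channel setup entirely.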
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # No channel was provided, so create a new one using the mutual
+            # TLS credentials assembled above.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # No channel was provided, so create a new one using the given or
+            # default credentials.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+        self._stubs = {}
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Sanity check: Only create a new client if we do not already have one.
+        if "operations_client" not in self.__dict__:
+            self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self.__dict__["operations_client"]
+
+    @property
+    def export_entities(
+        self,
+    ) -> Callable[
+        [datastore_admin.ExportEntitiesRequest], Awaitable[operations.Operation]
+    ]:
+        r"""Return a callable for the export entities method over gRPC.
+
+        Exports a copy of all or a subset of entities from
+        Google Cloud Datastore to another storage system, such
+        as Google Cloud Storage. Recent updates to entities may
+        not be reflected in the export. The export occurs in the
+        background and its progress can be monitored and managed
+        via the Operation resource that is created. The output
+        of an export may only be used once the associated
+        operation is done. If an export operation is cancelled
+        before completion it may leave partial data behind in
+        Google Cloud Storage.
+
+        Returns:
+            Callable[[~.ExportEntitiesRequest],
+                Awaitable[~.Operation]]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_entities" not in self._stubs: + self._stubs["export_entities"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + request_serializer=datastore_admin.ExportEntitiesRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["export_entities"] + + @property + def import_entities( + self, + ) -> Callable[ + [datastore_admin.ImportEntitiesRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the import entities method over gRPC. + + Imports entities into Google Cloud Datastore. + Existing entities with the same key are overwritten. The + import occurs in the background and its progress can be + monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it + is possible that a subset of the data has already been + imported to Cloud Datastore. + + Returns: + Callable[[~.ImportEntitiesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_entities" not in self._stubs: + self._stubs["import_entities"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + request_serializer=datastore_admin.ImportEntitiesRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["import_entities"] + + @property + def get_index( + self, + ) -> Callable[[datastore_admin.GetIndexRequest], Awaitable[index.Index]]: + r"""Return a callable for the get index method over gRPC. + + Gets an index. + + Returns: + Callable[[~.GetIndexRequest], + Awaitable[~.Index]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_index" not in self._stubs: + self._stubs["get_index"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + request_serializer=datastore_admin.GetIndexRequest.serialize, + response_deserializer=index.Index.deserialize, + ) + return self._stubs["get_index"] + + @property + def list_indexes( + self, + ) -> Callable[ + [datastore_admin.ListIndexesRequest], + Awaitable[datastore_admin.ListIndexesResponse], + ]: + r"""Return a callable for the list indexes method over gRPC. + + Lists the indexes that match the specified filters. + Datastore uses an eventually consistent query to fetch + the list of indexes and may occasionally return stale + results. + + Returns: + Callable[[~.ListIndexesRequest], + Awaitable[~.ListIndexesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
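+        #
+        # Usage sketch (illustrative only; assumes a running event loop and
+        # valid default credentials): each property returns an awaitable
+        # stub, e.g.
+        #
+        #     response = await transport.list_indexes(
+        #         datastore_admin.ListIndexesRequest(project_id="my-project")
+        #     )
+        #
+        # Long-running methods such as ``export_entities`` instead resolve to
+        # an ``operations.Operation`` message (``google.longrunning``), which
+        # can be polled through the ``operations_client`` property above.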
+ if "list_indexes" not in self._stubs: + self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + request_serializer=datastore_admin.ListIndexesRequest.serialize, + response_deserializer=datastore_admin.ListIndexesResponse.deserialize, + ) + return self._stubs["list_indexes"] + + +__all__ = ("DatastoreAdminGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py deleted file mode 100644 index 17ae2d27e8cb..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2 -from google.cloud.datastore_admin_v1.proto import index_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import any_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 - - -_shared_modules = [ - operations_pb2, - any_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [ - datastore_admin_pb2, - index_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.datastore_admin_v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py new file mode 100644 index 000000000000..b3bf63d88309 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .index import Index +from .datastore_admin import ( + CommonMetadata, + Progress, + ExportEntitiesRequest, + ImportEntitiesRequest, + ExportEntitiesResponse, + ExportEntitiesMetadata, + ImportEntitiesMetadata, + EntityFilter, + GetIndexRequest, + ListIndexesRequest, + ListIndexesResponse, + IndexOperationMetadata, +) + + +__all__ = ( + "Index", + "CommonMetadata", + "Progress", + "ExportEntitiesRequest", + "ImportEntitiesRequest", + "ExportEntitiesResponse", + "ExportEntitiesMetadata", + "ImportEntitiesMetadata", + "EntityFilter", + "GetIndexRequest", + "ListIndexesRequest", + "ListIndexesResponse", + "IndexOperationMetadata", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py new file mode 100644 index 000000000000..8f60bfe16b0f --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -0,0 +1,408 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datastore_admin_v1.types import index +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.admin.v1", + manifest={ + "OperationType", + "CommonMetadata", + "Progress", + "ExportEntitiesRequest", + "ImportEntitiesRequest", + "ExportEntitiesResponse", + "ExportEntitiesMetadata", + "ImportEntitiesMetadata", + "EntityFilter", + "GetIndexRequest", + "ListIndexesRequest", + "ListIndexesResponse", + "IndexOperationMetadata", + }, +) + + +class OperationType(proto.Enum): + r"""Operation types.""" + OPERATION_TYPE_UNSPECIFIED = 0 + EXPORT_ENTITIES = 1 + IMPORT_ENTITIES = 2 + CREATE_INDEX = 3 + DELETE_INDEX = 4 + + +class CommonMetadata(proto.Message): + r"""Metadata common to all Datastore Admin operations. + + Attributes: + start_time (~.timestamp.Timestamp): + The time that work began on the operation. + end_time (~.timestamp.Timestamp): + The time the operation ended, either + successfully or otherwise. + operation_type (~.datastore_admin.OperationType): + The type of the operation. Can be used as a + filter in ListOperationsRequest. + labels (Sequence[~.datastore_admin.CommonMetadata.LabelsEntry]): + The client-assigned labels which were + provided when the operation was created. May + also include additional labels. + state (~.datastore_admin.CommonMetadata.State): + The current state of the Operation. 
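+
+    Example (an illustrative sketch; the field values are hypothetical,
+    not defaults):
+        >>> from google.cloud.datastore_admin_v1.types import datastore_admin
+        >>> meta = datastore_admin.CommonMetadata(
+        ...     operation_type=datastore_admin.OperationType.EXPORT_ENTITIES,
+        ...     labels={"run": "nightly"},
+        ... )
+        >>> meta.state = datastore_admin.CommonMetadata.State.PROCESSING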
+ """ + + class State(proto.Enum): + r"""The various possible states for an ongoing Operation.""" + STATE_UNSPECIFIED = 0 + INITIALIZING = 1 + PROCESSING = 2 + CANCELLING = 3 + FINALIZING = 4 + SUCCESSFUL = 5 + FAILED = 6 + CANCELLED = 7 + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + operation_type = proto.Field(proto.ENUM, number=3, enum="OperationType",) + + labels = proto.MapField(proto.STRING, proto.STRING, number=4) + + state = proto.Field(proto.ENUM, number=5, enum=State,) + + +class Progress(proto.Message): + r"""Measures the progress of a particular metric. + + Attributes: + work_completed (int): + The amount of work that has been completed. Note that this + may be greater than work_estimated. + work_estimated (int): + An estimate of how much work needs to be + performed. May be zero if the work estimate is + unavailable. + """ + + work_completed = proto.Field(proto.INT64, number=1) + + work_estimated = proto.Field(proto.INT64, number=2) + + +class ExportEntitiesRequest(proto.Message): + r"""The request for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + Attributes: + project_id (str): + Required. Project ID against which to make + the request. + labels (Sequence[~.datastore_admin.ExportEntitiesRequest.LabelsEntry]): + Client-assigned labels. + entity_filter (~.datastore_admin.EntityFilter): + Description of what data from the project is + included in the export. + output_url_prefix (str): + Required. Location for the export metadata and data files. + + The full resource URL of the external storage location. + Currently, only Google Cloud Storage is supported. So + output_url_prefix should be of the form: + ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` + is the name of the Cloud Storage bucket and + ``NAMESPACE_PATH`` is an optional Cloud Storage namespace + path (this is not a Cloud Datastore namespace). For more + information about Cloud Storage namespace paths, see `Object + name + considerations `__. + + The resulting files will be nested deeper than the specified + URL prefix. The final output URL will be provided in the + [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] + field. That value should be used for subsequent + ImportEntities operations. + + By nesting the data files deeper, the same Cloud Storage + bucket can be used in multiple ExportEntities operations + without conflict. + """ + + project_id = proto.Field(proto.STRING, number=1) + + labels = proto.MapField(proto.STRING, proto.STRING, number=2) + + entity_filter = proto.Field(proto.MESSAGE, number=3, message="EntityFilter",) + + output_url_prefix = proto.Field(proto.STRING, number=4) + + +class ImportEntitiesRequest(proto.Message): + r"""The request for + [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. + + Attributes: + project_id (str): + Required. Project ID against which to make + the request. + labels (Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]): + Client-assigned labels. + input_url (str): + Required. The full resource URL of the external storage + location. Currently, only Google Cloud Storage is supported. 
+
+class ImportEntitiesRequest(proto.Message):
+    r"""The request for
+    [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities].
+
+    Attributes:
+        project_id (str):
+            Required. Project ID against which to make
+            the request.
+        labels (Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]):
+            Client-assigned labels.
+        input_url (str):
+            Required. The full resource URL of the external storage
+            location. Currently, only Google Cloud Storage is supported.
+            So input_url should be of the form:
+            ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``,
+            where ``BUCKET_NAME`` is the name of the Cloud Storage
+            bucket, ``NAMESPACE_PATH`` is an optional Cloud Storage
+            namespace path (this is not a Cloud Datastore namespace),
+            and ``OVERALL_EXPORT_METADATA_FILE`` is the metadata file
+            written by the ExportEntities operation. For more
+            information about Cloud Storage namespace paths, see `Object
+            name considerations
+            <https://cloud.google.com/storage/docs/naming#object-considerations>`__.
+
+            For more information, see
+            [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+        entity_filter (~.datastore_admin.EntityFilter):
+            Optionally specify which kinds/namespaces are to be
+            imported. If provided, the list must be a subset of the
+            EntityFilter used in creating the export, otherwise a
+            FAILED_PRECONDITION error will be returned. If no filter is
+            specified then all entities from the export are imported.
+    """
+
+    project_id = proto.Field(proto.STRING, number=1)
+
+    labels = proto.MapField(proto.STRING, proto.STRING, number=2)
+
+    input_url = proto.Field(proto.STRING, number=3)
+
+    entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",)
+
+
+class ExportEntitiesResponse(proto.Message):
+    r"""The response for
+    [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities].
+
+    Attributes:
+        output_url (str):
+            Location of the output metadata file. This can be used to
+            begin an import into Cloud Datastore (this project or
+            another project). See
+            [google.datastore.admin.v1.ImportEntitiesRequest.input_url][google.datastore.admin.v1.ImportEntitiesRequest.input_url].
+            Only present if the operation completed successfully.
+    """
+
+    output_url = proto.Field(proto.STRING, number=1)
+
+
+class ExportEntitiesMetadata(proto.Message):
+    r"""Metadata for ExportEntities operations.
+
+    Attributes:
+        common (~.datastore_admin.CommonMetadata):
+            Metadata common to all Datastore Admin
+            operations.
+        progress_entities (~.datastore_admin.Progress):
+            An estimate of the number of entities
+            processed.
+        progress_bytes (~.datastore_admin.Progress):
+            An estimate of the number of bytes processed.
+        entity_filter (~.datastore_admin.EntityFilter):
+            Description of which entities are being
+            exported.
+        output_url_prefix (str):
+            Location for the export metadata and data files. This will
+            be the same value as the
+            [google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix]
+            field. The final output location is provided in
+            [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+    """
+
+    common = proto.Field(proto.MESSAGE, number=1, message=CommonMetadata,)
+
+    progress_entities = proto.Field(proto.MESSAGE, number=2, message=Progress,)
+
+    progress_bytes = proto.Field(proto.MESSAGE, number=3, message=Progress,)
+
+    entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",)
+
+    output_url_prefix = proto.Field(proto.STRING, number=5)
+
+
+class ImportEntitiesMetadata(proto.Message):
+    r"""Metadata for ImportEntities operations.
+
+    Attributes:
+        common (~.datastore_admin.CommonMetadata):
+            Metadata common to all Datastore Admin
+            operations.
+        progress_entities (~.datastore_admin.Progress):
+            An estimate of the number of entities
+            processed.
+ progress_bytes (~.datastore_admin.Progress): + An estimate of the number of bytes processed. + entity_filter (~.datastore_admin.EntityFilter): + Description of which entities are being + imported. + input_url (str): + The location of the import metadata file. This will be the + same value as the + [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] + field. + """ + + common = proto.Field(proto.MESSAGE, number=1, message=CommonMetadata,) + + progress_entities = proto.Field(proto.MESSAGE, number=2, message=Progress,) + + progress_bytes = proto.Field(proto.MESSAGE, number=3, message=Progress,) + + entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) + + input_url = proto.Field(proto.STRING, number=5) + + +class EntityFilter(proto.Message): + r"""Identifies a subset of entities in a project. This is specified as + combinations of kinds and namespaces (either or both of which may be + all, as described in the following examples). Example usage: + + Entire project: kinds=[], namespace_ids=[] + + Kinds Foo and Bar in all namespaces: kinds=['Foo', 'Bar'], + namespace_ids=[] + + Kinds Foo and Bar only in the default namespace: kinds=['Foo', + 'Bar'], namespace_ids=[''] + + Kinds Foo and Bar in both the default and Baz namespaces: + kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] + + The entire Baz namespace: kinds=[], namespace_ids=['Baz'] + + Attributes: + kinds (Sequence[str]): + If empty, then this represents all kinds. + namespace_ids (Sequence[str]): + An empty list represents all namespaces. This + is the preferred usage for projects that don't + use namespaces. + An empty string element represents the default + namespace. This should be used if the project + has data in non-default namespaces, but doesn't + want to include them. + Each namespace in this list must be unique. + """ + + kinds = proto.RepeatedField(proto.STRING, number=1) + + namespace_ids = proto.RepeatedField(proto.STRING, number=2) + + +class GetIndexRequest(proto.Message): + r"""The request for + [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. + + Attributes: + project_id (str): + Project ID against which to make the request. + index_id (str): + The resource ID of the index to get. + """ + + project_id = proto.Field(proto.STRING, number=1) + + index_id = proto.Field(proto.STRING, number=3) + + +class ListIndexesRequest(proto.Message): + r"""The request for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Attributes: + project_id (str): + Project ID against which to make the request. + filter (str): + + page_size (int): + The maximum number of items to return. If + zero, then all results will be returned. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + project_id = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=3) + + page_size = proto.Field(proto.INT32, number=4) + + page_token = proto.Field(proto.STRING, number=5) + + +class ListIndexesResponse(proto.Message): + r"""The response for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Attributes: + indexes (Sequence[~.index.Index]): + The indexes. + next_page_token (str): + The standard List next-page token. 
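+
+    Example (an illustrative sketch; assumes ``response`` came from a
+    prior ``list_indexes`` call):
+        >>> for idx in response.indexes:
+        ...     print(idx.index_id, idx.state)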
+ """ + + @property + def raw_page(self): + return self + + indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=index.Index,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class IndexOperationMetadata(proto.Message): + r"""Metadata for Index operations. + + Attributes: + common (~.datastore_admin.CommonMetadata): + Metadata common to all Datastore Admin + operations. + progress_entities (~.datastore_admin.Progress): + An estimate of the number of entities + processed. + index_id (str): + The index resource ID that this operation is + acting on. + """ + + common = proto.Field(proto.MESSAGE, number=1, message=CommonMetadata,) + + progress_entities = proto.Field(proto.MESSAGE, number=2, message=Progress,) + + index_id = proto.Field(proto.STRING, number=3) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py new file mode 100644 index 000000000000..e11a27a584cd --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module(package="google.datastore.admin.v1", manifest={"Index",},) + + +class Index(proto.Message): + r"""A minimal index definition. + + Attributes: + project_id (str): + Output only. Project ID. + index_id (str): + Output only. The resource ID of the index. + kind (str): + Required. The entity kind to which this index + applies. + ancestor (~.index.Index.AncestorMode): + Required. The index's ancestor mode. Must not be + ANCESTOR_MODE_UNSPECIFIED. + properties (Sequence[~.index.Index.IndexedProperty]): + Required. An ordered sequence of property + names and their index attributes. + state (~.index.Index.State): + Output only. The state of the index. + """ + + class AncestorMode(proto.Enum): + r"""For an ordered index, specifies whether each of the entity's + ancestors will be included. + """ + ANCESTOR_MODE_UNSPECIFIED = 0 + NONE = 1 + ALL_ANCESTORS = 2 + + class Direction(proto.Enum): + r"""The direction determines how a property is indexed.""" + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + class State(proto.Enum): + r"""The possible set of states of an index.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + ERROR = 4 + + class IndexedProperty(proto.Message): + r"""A property of an index. + + Attributes: + name (str): + Required. The property name to index. + direction (~.index.Index.Direction): + Required. The indexed property's direction. Must not be + DIRECTION_UNSPECIFIED. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + direction = proto.Field(proto.ENUM, number=2, enum="Index.Direction",) + + project_id = proto.Field(proto.STRING, number=1) + + index_id = proto.Field(proto.STRING, number=3) + + kind = proto.Field(proto.STRING, number=4) + + ancestor = proto.Field(proto.ENUM, number=5, enum=AncestorMode,) + + properties = proto.RepeatedField(proto.MESSAGE, number=6, message=IndexedProperty,) + + state = proto.Field(proto.ENUM, number=7, enum=State,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 0308f10c0a6a..a4b5de763013 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -1,27 +1,93 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -from __future__ import absolute_import - -from google.cloud.datastore_v1 import types -from google.cloud.datastore_v1.gapic import datastore_client -from google.cloud.datastore_v1.gapic import enums - - -class DatastoreClient(datastore_client.DatastoreClient): - __doc__ = datastore_client.DatastoreClient.__doc__ - enums = enums +from .services.datastore import DatastoreClient +from .types.datastore import AllocateIdsRequest +from .types.datastore import AllocateIdsResponse +from .types.datastore import BeginTransactionRequest +from .types.datastore import BeginTransactionResponse +from .types.datastore import CommitRequest +from .types.datastore import CommitResponse +from .types.datastore import LookupRequest +from .types.datastore import LookupResponse +from .types.datastore import Mutation +from .types.datastore import MutationResult +from .types.datastore import ReadOptions +from .types.datastore import ReserveIdsRequest +from .types.datastore import ReserveIdsResponse +from .types.datastore import RollbackRequest +from .types.datastore import RollbackResponse +from .types.datastore import RunQueryRequest +from .types.datastore import RunQueryResponse +from .types.datastore import TransactionOptions +from .types.entity import ArrayValue +from .types.entity import Entity +from .types.entity import Key +from .types.entity import PartitionId +from .types.entity import Value +from .types.query import CompositeFilter +from .types.query import EntityResult +from .types.query import Filter +from .types.query import GqlQuery +from .types.query import GqlQueryParameter +from .types.query import KindExpression +from .types.query import Projection +from .types.query import PropertyFilter +from .types.query import PropertyOrder +from .types.query import PropertyReference +from .types.query import Query +from .types.query import QueryResultBatch -__all__ = ("enums", "types", "DatastoreClient") +__all__ = ( + "AllocateIdsRequest", + 
"AllocateIdsResponse", + "ArrayValue", + "BeginTransactionRequest", + "BeginTransactionResponse", + "CommitRequest", + "CommitResponse", + "CompositeFilter", + "Entity", + "EntityResult", + "Filter", + "GqlQuery", + "GqlQueryParameter", + "Key", + "KindExpression", + "LookupRequest", + "LookupResponse", + "Mutation", + "MutationResult", + "PartitionId", + "Projection", + "PropertyFilter", + "PropertyOrder", + "PropertyReference", + "Query", + "QueryResultBatch", + "ReadOptions", + "ReserveIdsRequest", + "ReserveIdsResponse", + "RollbackRequest", + "RollbackResponse", + "RunQueryRequest", + "RunQueryResponse", + "TransactionOptions", + "Value", + "DatastoreClient", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py deleted file mode 100644 index ac61c12bfac2..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client.py +++ /dev/null @@ -1,805 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.datastore.v1 Datastore API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.datastore_v1.gapic import datastore_client_config -from google.cloud.datastore_v1.gapic import enums -from google.cloud.datastore_v1.gapic.transports import datastore_grpc_transport -from google.cloud.datastore_v1.proto import datastore_pb2 -from google.cloud.datastore_v1.proto import datastore_pb2_grpc -from google.cloud.datastore_v1.proto import entity_pb2 -from google.cloud.datastore_v1.proto import query_pb2 -from google.cloud.datastore.version import __version__ - - -_GAPIC_LIBRARY_VERSION = __version__ - - -class DatastoreClient(object): - """ - Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - """ - - SERVICE_ADDRESS = "datastore.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = "google.datastore.v1.Datastore" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DatastoreGrpcTransport, - Callable[[~.Credentials, type], ~.DatastoreGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = datastore_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. 
- if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=datastore_grpc_transport.DatastoreGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def lookup( - self, - project_id, - keys, - read_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Looks up entities by key. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `keys`: - >>> keys = [] - >>> - >>> response = client.lookup(project_id, keys) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. Keys of entities to look up. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Key` - read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this lookup request. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.ReadOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.LookupResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "lookup" not in self._inner_api_calls: - self._inner_api_calls[ - "lookup" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.lookup, - default_retry=self._method_configs["Lookup"].retry, - default_timeout=self._method_configs["Lookup"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.LookupRequest( - project_id=project_id, keys=keys, read_options=read_options, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["lookup"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def run_query( - self, - project_id, - partition_id=None, - read_options=None, - query=None, - gql_query=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Queries for entities. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> response = client.run_query(project_id) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into subsets, identified by a partition ID. - Queries are scoped to a single partition. - This partition ID is normalized with the standard default context - partition ID. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.PartitionId` - read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this query. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.ReadOptions` - query (Union[dict, ~google.cloud.datastore_v1.types.Query]): The query to run. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Query` - gql_query (Union[dict, ~google.cloud.datastore_v1.types.GqlQuery]): The GQL query to run. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.GqlQuery` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.RunQueryResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "run_query" not in self._inner_api_calls: - self._inner_api_calls[ - "run_query" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_query, - default_retry=self._method_configs["RunQuery"].retry, - default_timeout=self._method_configs["RunQuery"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - query=query, gql_query=gql_query, - ) - - request = datastore_pb2.RunQueryRequest( - project_id=project_id, - partition_id=partition_id, - read_options=read_options, - query=query, - gql_query=gql_query, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["run_query"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def reserve_ids( - self, - project_id, - keys, - database_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `keys`: - >>> keys = [] - >>> - >>> response = client.reserve_ids(project_id, keys) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. A list of keys with complete key paths whose numeric IDs should not be - auto-allocated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Key` - database_id (str): If not empty, the ID of the database against which to make the request. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "reserve_ids" not in self._inner_api_calls: - self._inner_api_calls[ - "reserve_ids" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reserve_ids, - default_retry=self._method_configs["ReserveIds"].retry, - default_timeout=self._method_configs["ReserveIds"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, keys=keys, database_id=database_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["reserve_ids"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def begin_transaction( - self, - project_id, - transaction_options=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Begins a new transaction. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> response = client.begin_transaction(project_id) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - transaction_options (Union[dict, ~google.cloud.datastore_v1.types.TransactionOptions]): Options for a new transaction. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.TransactionOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.BeginTransactionResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "begin_transaction" not in self._inner_api_calls: - self._inner_api_calls[ - "begin_transaction" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.begin_transaction, - default_retry=self._method_configs["BeginTransaction"].retry, - default_timeout=self._method_configs["BeginTransaction"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, transaction_options=transaction_options, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["begin_transaction"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def commit( - self, - project_id, - mode=None, - mutations=None, - transaction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Commits a transaction, optionally creating, deleting or modifying some - entities. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> response = client.commit(project_id) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. - transaction (bytes): The identifier of the transaction associated with the commit. A - transaction identifier is returned by a call to - ``Datastore.BeginTransaction``. - mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform. - - When mode is ``TRANSACTIONAL``, mutations affecting a single entity are - applied in order. The following sequences of mutations affecting a - single entity are not permitted in a single ``Commit`` request: - - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` - - When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single - entity. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Mutation` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.CommitResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "commit" not in self._inner_api_calls: - self._inner_api_calls[ - "commit" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.commit, - default_retry=self._method_configs["Commit"].retry, - default_timeout=self._method_configs["Commit"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(transaction=transaction,) - - request = datastore_pb2.CommitRequest( - project_id=project_id, - mode=mode, - transaction=transaction, - mutations=mutations, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["commit"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def rollback( - self, - project_id, - transaction, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Rolls back a transaction. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `transaction`: - >>> transaction = b'' - >>> - >>> response = client.rollback(project_id, transaction) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - transaction (bytes): Required. The transaction identifier, returned by a call to - ``Datastore.BeginTransaction``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.RollbackResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "rollback" not in self._inner_api_calls: - self._inner_api_calls[ - "rollback" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.rollback, - default_retry=self._method_configs["Rollback"].retry, - default_timeout=self._method_configs["Rollback"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["rollback"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def allocate_ids( - self, - project_id, - keys, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `keys`: - >>> keys = [] - >>> - >>> response = client.allocate_ids(project_id, keys) - - Args: - project_id (str): Required. The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. A list of keys with incomplete key paths for which to allocate IDs. - No key may be reserved/read-only. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Key` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.AllocateIdsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "allocate_ids" not in self._inner_api_calls: - self._inner_api_calls[ - "allocate_ids" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.allocate_ids, - default_retry=self._method_configs["AllocateIds"].retry, - default_timeout=self._method_configs["AllocateIds"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["allocate_ids"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py deleted file mode 100644 index b1f7991ef30a..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ /dev/null @@ -1,77 +0,0 @@ -config = { - "interfaces": { - "google.datastore.v1.Datastore": { - "retry_codes": { - "retry_policy_1_codes": ["UNAVAILABLE", "DEADLINE_EXCEEDED"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 60000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 60000, - }, - }, - "methods": { - "Lookup": { - "timeout_millis": 60000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "RunQuery": { - "timeout_millis": 60000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ReserveIds": { - "timeout_millis": 60000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "BeginTransaction": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "Commit": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "Rollback": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "AllocateIds": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - }, - } - } -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py deleted file mode 100644 index f84538a3c665..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/enums.py +++ 
/dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value - for the ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. - """ - - NULL_VALUE = 0 - - -class CommitRequest(object): - class Mode(enum.IntEnum): - """ - The modes available for commits. - - Attributes: - MODE_UNSPECIFIED (int): Unspecified. This value must not be used. - TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are - applied. Learn about transactions - `here `__. - NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. - """ - - MODE_UNSPECIFIED = 0 - TRANSACTIONAL = 1 - NON_TRANSACTIONAL = 2 - - -class CompositeFilter(object): - class Operator(enum.IntEnum): - """ - A composite filter operator. - - Attributes: - OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. - AND (int): The results are required to satisfy each of the combined filters. - """ - - OPERATOR_UNSPECIFIED = 0 - AND = 1 - - -class EntityResult(object): - class ResultType(enum.IntEnum): - """ - Specifies what data the 'entity' field contains. A ``ResultType`` is - either implied (for example, in ``LookupResponse.missing`` from - ``datastore.proto``, it is always ``KEY_ONLY``) or specified by context - (for example, in message ``QueryResultBatch``, field - ``entity_result_type`` specifies a ``ResultType`` for all the values in - field ``entity_results``). - - Attributes: - RESULT_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. - FULL (int): The key and properties. - PROJECTION (int): A projected subset of properties. The entity may have no key. - KEY_ONLY (int): Only the key. - """ - - RESULT_TYPE_UNSPECIFIED = 0 - FULL = 1 - PROJECTION = 2 - KEY_ONLY = 3 - - -class PropertyFilter(object): - class Operator(enum.IntEnum): - """ - A property filter operator. - - Attributes: - OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. - LESS_THAN (int): Less than. - LESS_THAN_OR_EQUAL (int): Less than or equal. - GREATER_THAN (int): Greater than. - GREATER_THAN_OR_EQUAL (int): Greater than or equal. - EQUAL (int): Equal. - HAS_ANCESTOR (int): Has ancestor. - """ - - OPERATOR_UNSPECIFIED = 0 - LESS_THAN = 1 - LESS_THAN_OR_EQUAL = 2 - GREATER_THAN = 3 - GREATER_THAN_OR_EQUAL = 4 - EQUAL = 5 - HAS_ANCESTOR = 11 - - -class PropertyOrder(object): - class Direction(enum.IntEnum): - """ - The sort direction. - - Attributes: - DIRECTION_UNSPECIFIED (int): Unspecified. This value must not be used. - ASCENDING (int): Ascending. - DESCENDING (int): Descending. 
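These wrappers are plain `enum.IntEnum` subclasses, so members compare equal to the raw protobuf integers; a self-contained sketch:

    import enum

    class Mode(enum.IntEnum):
        MODE_UNSPECIFIED = 0
        TRANSACTIONAL = 1
        NON_TRANSACTIONAL = 2

    assert Mode.TRANSACTIONAL == 1  # interchangeable with the wire value
    print(Mode(2).name)             # -> 'NON_TRANSACTIONAL'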
- """ - - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - -class QueryResultBatch(object): - class MoreResultsType(enum.IntEnum): - """ - The possible values for the ``more_results`` field. - - Attributes: - MORE_RESULTS_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. - NOT_FINISHED (int): There may be additional batches to fetch from this query. - MORE_RESULTS_AFTER_LIMIT (int): The query is finished, but there may be more results after the limit. - MORE_RESULTS_AFTER_CURSOR (int): The query is finished, but there may be more results after the end - cursor. - NO_MORE_RESULTS (int): The query is finished, and there are no more results. - """ - - MORE_RESULTS_TYPE_UNSPECIFIED = 0 - NOT_FINISHED = 1 - MORE_RESULTS_AFTER_LIMIT = 2 - MORE_RESULTS_AFTER_CURSOR = 4 - NO_MORE_RESULTS = 3 - - -class ReadOptions(object): - class ReadConsistency(enum.IntEnum): - """ - The possible values for read consistencies. - - Attributes: - READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. - STRONG (int): Strong consistency. - EVENTUAL (int): Eventual consistency. - """ - - READ_CONSISTENCY_UNSPECIFIED = 0 - STRONG = 1 - EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py deleted file mode 100644 index 74552d8a32d6..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ /dev/null @@ -1,205 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.datastore_v1.proto import datastore_pb2_grpc - - -class DatastoreGrpcTransport(object): - """gRPC transport class providing stubs for - google.datastore.v1 Datastore API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - - def __init__( - self, channel=None, credentials=None, address="datastore.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "datastore_stub": datastore_pb2_grpc.DatastoreStub(channel), - } - - @classmethod - def create_channel( - cls, address="datastore.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def lookup(self): - """Return the gRPC stub for :meth:`DatastoreClient.lookup`. - - Looks up entities by key. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].Lookup - - @property - def run_query(self): - """Return the gRPC stub for :meth:`DatastoreClient.run_query`. - - Queries for entities. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].RunQuery - - @property - def reserve_ids(self): - """Return the gRPC stub for :meth:`DatastoreClient.reserve_ids`. - - Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].ReserveIds - - @property - def begin_transaction(self): - """Return the gRPC stub for :meth:`DatastoreClient.begin_transaction`. - - Begins a new transaction. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].BeginTransaction - - @property - def commit(self): - """Return the gRPC stub for :meth:`DatastoreClient.commit`. - - Commits a transaction, optionally creating, deleting or modifying some - entities. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
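A hedged sketch of wiring a stub the way this transport does; the emulator address is an assumption for illustration, and `grpcio` plus the generated `datastore_pb2_grpc` module are required:

    import grpc

    from google.cloud.datastore_v1.proto import datastore_pb2_grpc

    channel = grpc.insecure_channel("localhost:8081")  # e.g. a local emulator
    stub = datastore_pb2_grpc.DatastoreStub(channel)
    # stub.Lookup, stub.RunQuery, stub.Commit, ... accept request protos.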
- """ - return self._stubs["datastore_stub"].Commit - - @property - def rollback(self): - """Return the gRPC stub for :meth:`DatastoreClient.rollback`. - - Rolls back a transaction. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].Rollback - - @property - def allocate_ids(self): - """Return the gRPC stub for :meth:`DatastoreClient.allocate_ids`. - - Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].AllocateIds diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto deleted file mode 100644 index c730de79c0f3..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_admin.proto +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright 2018 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.admin.v1beta1; - -import "google/api/annotations.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.Datastore.Admin.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1beta1;admin"; -option java_multiple_files = true; -option java_outer_classname = "DatastoreAdminProto"; -option java_package = "com.google.datastore.admin.v1beta1"; - - -// Google Cloud Datastore Admin API -// -// The Datastore Admin API provides several admin services for Cloud Datastore. -// -// ----------------------------------------------------------------------------- -// ## Concepts -// -// Project, namespace, kind, and entity as defined in the Google Cloud Datastore -// API. -// -// Operation: An Operation represents work being performed in the background. -// -// EntityFilter: Allows specifying a subset of entities in a project. This is -// specified as a combination of kinds and namespaces (either or both of which -// may be all). -// -// ----------------------------------------------------------------------------- -// ## Services -// -// # Export/Import -// -// The Export/Import service provides the ability to copy all or a subset of -// entities to/from Google Cloud Storage. -// -// Exported data may be imported into Cloud Datastore for any Google Cloud -// Platform project. It is not restricted to the export source project. 
It is -// possible to export from one project and then import into another. -// -// Exported data can also be loaded into Google BigQuery for analysis. -// -// Exports and imports are performed asynchronously. An Operation resource is -// created for each export/import. The state (including any errors encountered) -// of the export/import may be queried via the Operation resource. -// -// # Operation -// -// The Operations collection provides a record of actions performed for the -// specified project (including any operations in progress). Operations are not -// created directly but through calls on other collections or resources. -// -// An operation that is not yet done may be cancelled. The request to cancel is -// asynchronous and the operation may continue to run for some time after the -// request to cancel is made. -// -// An operation that is done may be deleted so that it is no longer listed as -// part of the Operation collection. -// -// ListOperations returns all pending operations, but not completed operations. -// -// Operations are created by service DatastoreAdmin, -// but are accessed via service google.longrunning.Operations. -service DatastoreAdmin { - // Exports a copy of all or a subset of entities from Google Cloud Datastore - // to another storage system, such as Google Cloud Storage. Recent updates to - // entities may not be reflected in the export. The export occurs in the - // background and its progress can be monitored and managed via the - // Operation resource that is created. The output of an export may only be - // used once the associated operation is done. If an export operation is - // cancelled before completion it may leave partial data behind in Google - // Cloud Storage. - rpc ExportEntities(ExportEntitiesRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1beta1/projects/{project_id}:export" - body: "*" - }; - } - - // Imports entities into Google Cloud Datastore. Existing entities with the - // same key are overwritten. The import occurs in the background and its - // progress can be monitored and managed via the Operation resource that is - // created. If an ImportEntities operation is cancelled, it is possible - // that a subset of the data has already been imported to Cloud Datastore. - rpc ImportEntities(ImportEntitiesRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1beta1/projects/{project_id}:import" - body: "*" - }; - } -} - -// Metadata common to all Datastore Admin operations. -message CommonMetadata { - // The various possible states for an ongoing Operation. - enum State { - // Unspecified. - STATE_UNSPECIFIED = 0; - - // Request is being prepared for processing. - INITIALIZING = 1; - - // Request is actively being processed. - PROCESSING = 2; - - // Request is in the process of being cancelled after user called - // google.longrunning.Operations.CancelOperation on the operation. - CANCELLING = 3; - - // Request has been processed and is in its finalization stage. - FINALIZING = 4; - - // Request has completed successfully. - SUCCESSFUL = 5; - - // Request has finished being processed, but encountered an error. - FAILED = 6; - - // Request has finished being cancelled after user called - // google.longrunning.Operations.CancelOperation. - CANCELLED = 7; - } - - // The time that work began on the operation. - google.protobuf.Timestamp start_time = 1; - - // The time the operation ended, either successfully or otherwise. 
- google.protobuf.Timestamp end_time = 2; - - // The type of the operation. Can be used as a filter in - // ListOperationsRequest. - OperationType operation_type = 3; - - // The client-assigned labels which were provided when the operation was - // created. May also include additional labels. - map labels = 4; - - // The current state of the Operation. - State state = 5; -} - -// Measures the progress of a particular metric. -message Progress { - // The amount of work that has been completed. Note that this may be greater - // than work_estimated. - int64 work_completed = 1; - - // An estimate of how much work needs to be performed. May be zero if the - // work estimate is unavailable. - int64 work_estimated = 2; -} - -// The request for -// [google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities]. -message ExportEntitiesRequest { - // Project ID against which to make the request. - string project_id = 1; - - // Client-assigned labels. - map labels = 2; - - // Description of what data from the project is included in the export. - EntityFilter entity_filter = 3; - - // Location for the export metadata and data files. - // - // The full resource URL of the external storage location. Currently, only - // Google Cloud Storage is supported. So output_url_prefix should be of the - // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the - // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud - // Storage namespace path (this is not a Cloud Datastore namespace). For more - // information about Cloud Storage namespace paths, see - // [Object name - // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). - // - // The resulting files will be nested deeper than the specified URL prefix. - // The final output URL will be provided in the - // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url] - // field. That value should be used for subsequent ImportEntities operations. - // - // By nesting the data files deeper, the same Cloud Storage bucket can be used - // in multiple ExportEntities operations without conflict. - string output_url_prefix = 4; -} - -// The request for -// [google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities]. -message ImportEntitiesRequest { - // Project ID against which to make the request. - string project_id = 1; - - // Client-assigned labels. - map labels = 2; - - // The full resource URL of the external storage location. Currently, only - // Google Cloud Storage is supported. So input_url should be of the form: - // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where - // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is - // an optional Cloud Storage namespace path (this is not a Cloud Datastore - // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written - // by the ExportEntities operation. For more information about Cloud Storage - // namespace paths, see - // [Object name - // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). - // - // For more information, see - // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url]. 
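// ---- Illustrative example (not part of this file): an ExportEntitiesRequest
// in text-proto form that exports kinds Foo and Bar from the default
// namespace to an assumed bucket named my-bucket:
//
//   project_id: "my-project"
//   entity_filter { kinds: "Foo" kinds: "Bar" namespace_ids: "" }
//   output_url_prefix: "gs://my-bucket/exports"
// ----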
- string input_url = 3; - - // Optionally specify which kinds/namespaces are to be imported. If provided, - // the list must be a subset of the EntityFilter used in creating the export, - // otherwise a FAILED_PRECONDITION error will be returned. If no filter is - // specified then all entities from the export are imported. - EntityFilter entity_filter = 4; -} - -// The response for -// [google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities]. -message ExportEntitiesResponse { - // Location of the output metadata file. This can be used to begin an import - // into Cloud Datastore (this project or another project). See - // [google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url][google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url]. - // Only present if the operation completed successfully. - string output_url = 1; -} - -// Metadata for ExportEntities operations. -message ExportEntitiesMetadata { - // Metadata common to all Datastore Admin operations. - CommonMetadata common = 1; - - // An estimate of the number of entities processed. - Progress progress_entities = 2; - - // An estimate of the number of bytes processed. - Progress progress_bytes = 3; - - // Description of which entities are being exported. - EntityFilter entity_filter = 4; - - // Location for the export metadata and data files. This will be the same - // value as the - // [google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix] - // field. The final output location is provided in - // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url]. - string output_url_prefix = 5; -} - -// Metadata for ImportEntities operations. -message ImportEntitiesMetadata { - // Metadata common to all Datastore Admin operations. - CommonMetadata common = 1; - - // An estimate of the number of entities processed. - Progress progress_entities = 2; - - // An estimate of the number of bytes processed. - Progress progress_bytes = 3; - - // Description of which entities are being imported. - EntityFilter entity_filter = 4; - - // The location of the import metadata file. This will be the same value as - // the [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url] - // field. - string input_url = 5; -} - -// Identifies a subset of entities in a project. This is specified as -// combinations of kinds and namespaces (either or both of which may be all, as -// described in the following examples). -// Example usage: -// -// Entire project: -// kinds=[], namespace_ids=[] -// -// Kinds Foo and Bar in all namespaces: -// kinds=['Foo', 'Bar'], namespace_ids=[] -// -// Kinds Foo and Bar only in the default namespace: -// kinds=['Foo', 'Bar'], namespace_ids=[''] -// -// Kinds Foo and Bar in both the default and Baz namespaces: -// kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] -// -// The entire Baz namespace: -// kinds=[], namespace_ids=['Baz'] -message EntityFilter { - // If empty, then this represents all kinds. - repeated string kinds = 1; - - // An empty list represents all namespaces. This is the preferred - // usage for projects that don't use namespaces. - // - // An empty string element represents the default namespace. 
This should be - // used if the project has data in non-default namespaces, but doesn't want to - // include them. - // Each namespace in this list must be unique. - repeated string namespace_ids = 2; -} - -// Operation types. -enum OperationType { - // Unspecified. - OPERATION_TYPE_UNSPECIFIED = 0; - - // ExportEntities. - EXPORT_ENTITIES = 1; - - // ImportEntities. - IMPORT_ENTITIES = 2; -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py deleted file mode 100644 index cf7a3cfd1168..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore_pb2.py +++ /dev/null @@ -1,2159 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datastore_v1/proto/datastore.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.datastore_v1.proto import ( - entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2, -) -from google.cloud.datastore_v1.proto import ( - query_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_query__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datastore_v1/proto/datastore.proto", - package="google.datastore.v1", - syntax="proto3", - serialized_options=b"\n\027com.google.datastore.v1B\016DatastoreProtoP\001Z, <=, >=, and =. - ASCENDING = 1; - - // The property's values are indexed so as to support sequencing in - // descending order and also query by <, >, <=, >=, and =. - DESCENDING = 2; - } - - // The possible set of states of an index. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The index is being created, and cannot be used by queries. - // There is an active long-running operation for the index. - // The index is updated when writing an entity. - // Some index data may exist. - CREATING = 1; - - // The index is ready to be used. - // The index is updated when writing an entity. - // The index is fully populated from all stored entities it applies to. - READY = 2; - - // The index is being deleted, and cannot be used by queries. - // There is an active long-running operation for the index. - // The index is not updated when writing an entity. - // Some index data may exist. - DELETING = 3; - - // The index was being created or deleted, but something went wrong. - // The index cannot by used by queries. - // There is no active long-running operation for the index, - // and the most recently finished long-running operation failed. - // The index is not updated when writing an entity. - // Some index data may exist. - ERROR = 4; - } - - // Project ID. - // Output only. - string project_id = 1; - - // The resource ID of the index. - // Output only. - string index_id = 3; - - // The entity kind to which this index applies. - // Required. - string kind = 4; - - // The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. 
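// ---- Illustrative example (not part of this file): a composite index over
// kind Task, ordered by priority descending then created ascending, assuming
// IndexedProperty carries a property name and a direction:
//
//   kind: "Task"
//   properties { name: "priority" direction: DESCENDING }
//   properties { name: "created" direction: ASCENDING }
// ----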
- // Required. - AncestorMode ancestor = 5; - - // An ordered sequence of property names and their index attributes. - // Required. - repeated IndexedProperty properties = 6; - - // The state of the index. - // Output only. - State state = 7; -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py deleted file mode 100644 index e3bd1141e7f0..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2.py +++ /dev/null @@ -1,1728 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datastore_v1/proto/query.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.datastore_v1.proto import ( - entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2, -) -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datastore_v1/proto/query.proto", - package="google.datastore.v1", - syntax="proto3", - serialized_options=b"\n\027com.google.datastore.v1B\nQueryProtoP\001Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 
\x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\xbc\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z`__. - end_cursor: - An ending point for the query results. Query cursors are - returned in query result batches and `can only be used to - limit the same query `__. - offset: - The number of results to skip. Applies before limit, but after - all other constraints. Optional. Must be >= 0 if specified. - limit: - The maximum number of results to return. Applies after all - other constraints. Optional. Unspecified is interpreted as no - limit. Must be >= 0 if specified. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) - }, -) -_sym_db.RegisterMessage(Query) - -KindExpression = _reflection.GeneratedProtocolMessageType( - "KindExpression", - (_message.Message,), - { - "DESCRIPTOR": _KINDEXPRESSION, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A representation of a kind. - - Attributes: - name: - The name of the kind. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) - }, -) -_sym_db.RegisterMessage(KindExpression) - -PropertyReference = _reflection.GeneratedProtocolMessageType( - "PropertyReference", - (_message.Message,), - { - "DESCRIPTOR": _PROPERTYREFERENCE, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A reference to a property relative to the kind expressions. - - Attributes: - name: - The name of the property. If name includes “.”s, it may be - interpreted as a property name path. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) - }, -) -_sym_db.RegisterMessage(PropertyReference) - -Projection = _reflection.GeneratedProtocolMessageType( - "Projection", - (_message.Message,), - { - "DESCRIPTOR": _PROJECTION, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A representation of a property in a projection. - - Attributes: - property: - The property to project. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) - }, -) -_sym_db.RegisterMessage(Projection) - -PropertyOrder = _reflection.GeneratedProtocolMessageType( - "PropertyOrder", - (_message.Message,), - { - "DESCRIPTOR": _PROPERTYORDER, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """The desired order for a specific property. - - Attributes: - property: - The property to order by. - direction: - The direction to order by. Defaults to ``ASCENDING``. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) - }, -) -_sym_db.RegisterMessage(PropertyOrder) - -Filter = _reflection.GeneratedProtocolMessageType( - "Filter", - (_message.Message,), - { - "DESCRIPTOR": _FILTER, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A holder for any type of filter. - - Attributes: - filter_type: - The type of filter. - composite_filter: - A composite filter. - property_filter: - A filter on a property. 
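The cursor fields documented on `Query` above drive pagination; a hedged sketch of the resume loop, where `run_once` is a hypothetical callable that issues one `RunQuery` and returns a `QueryResultBatch`:

    def drain(run_once):
        # Yield every result, resuming from each batch's end_cursor.
        cursor = b""
        while True:
            batch = run_once(start_cursor=cursor)
            for result in batch.entity_results:
                yield result
            if batch.more_results != 1:  # 1 == NOT_FINISHED
                break
            cursor = batch.end_cursor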
- """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) - }, -) -_sym_db.RegisterMessage(Filter) - -CompositeFilter = _reflection.GeneratedProtocolMessageType( - "CompositeFilter", - (_message.Message,), - { - "DESCRIPTOR": _COMPOSITEFILTER, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A filter that merges multiple other filters using the given operator. - - Attributes: - op: - The operator for combining multiple filters. - filters: - The list of filters to combine. Must contain at least one - filter. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) - }, -) -_sym_db.RegisterMessage(CompositeFilter) - -PropertyFilter = _reflection.GeneratedProtocolMessageType( - "PropertyFilter", - (_message.Message,), - { - "DESCRIPTOR": _PROPERTYFILTER, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A filter on a specific property. - - Attributes: - property: - The property to filter by. - op: - The operator to filter by. - value: - The value to compare the property to. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) - }, -) -_sym_db.RegisterMessage(PropertyFilter) - -GqlQuery = _reflection.GeneratedProtocolMessageType( - "GqlQuery", - (_message.Message,), - { - "NamedBindingsEntry": _reflection.GeneratedProtocolMessageType( - "NamedBindingsEntry", - (_message.Message,), - { - "DESCRIPTOR": _GQLQUERY_NAMEDBINDINGSENTRY, - "__module__": "google.cloud.datastore_v1.proto.query_pb2" - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) - }, - ), - "DESCRIPTOR": _GQLQUERY, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A `GQL query - `__. - - Attributes: - query_string: - A string of the format described `here `__. - allow_literals: - When false, the query string must not contain any literals and - instead must bind all values. For example, ``SELECT * FROM - Kind WHERE a = 'string literal'`` is not allowed, while - ``SELECT * FROM Kind WHERE a = @value`` is. - named_bindings: - For each non-reserved named binding site in the query string, - there must be a named parameter with that name, but not - necessarily the inverse. Key must match regex ``[A-Za- - z_$][A-Za-z_$0-9]*``, must not match regex ``__.*__``, and - must not be ``""``. - positional_bindings: - Numbered binding site @1 references the first numbered - parameter, effectively using 1-based indexing, rather than the - usual 0. For each binding site numbered i in - ``query_string``, there must be an i-th numbered parameter. - The inverse must also be true. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) - }, -) -_sym_db.RegisterMessage(GqlQuery) -_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) - -GqlQueryParameter = _reflection.GeneratedProtocolMessageType( - "GqlQueryParameter", - (_message.Message,), - { - "DESCRIPTOR": _GQLQUERYPARAMETER, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A binding parameter for a GQL query. - - Attributes: - parameter_type: - The type of parameter. - value: - A value parameter. - cursor: - A query cursor. Query cursors are returned in query result - batches. 
- """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) - }, -) -_sym_db.RegisterMessage(GqlQueryParameter) - -QueryResultBatch = _reflection.GeneratedProtocolMessageType( - "QueryResultBatch", - (_message.Message,), - { - "DESCRIPTOR": _QUERYRESULTBATCH, - "__module__": "google.cloud.datastore_v1.proto.query_pb2", - "__doc__": """A batch of results produced by a query. - - Attributes: - skipped_results: - The number of results skipped, typically because of an offset. - skipped_cursor: - A cursor that points to the position after the last skipped - result. Will be set when ``skipped_results`` != 0. - entity_result_type: - The result type for every entity in ``entity_results``. - entity_results: - The results for this batch. - end_cursor: - A cursor that points to the position after the last result in - the batch. - more_results: - The state of the query after the current batch. - snapshot_version: - The version number of the snapshot this batch was returned - from. This applies to the range of results from the query’s - ``start_cursor`` (or the beginning of the query if no cursor - was given) to this batch’s ``end_cursor`` (not the query’s - ``end_cursor``). In a single transaction, subsequent query - result batches for the same query can have a greater snapshot - version number. Each batch’s snapshot version is valid for all - preceding batches. The value will be zero for eventually - consistent queries. - """, - # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) - }, -) -_sym_db.RegisterMessage(QueryResultBatch) - - -DESCRIPTOR._options = None -_GQLQUERY_NAMEDBINDINGSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/py.typed b/packages/google-cloud-datastore/google/cloud/datastore_v1/py.typed new file mode 100644 index 000000000000..e82a93198619 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datastore package uses inline types. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py new file mode 100644 index 000000000000..42ffdf2bc43d --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py new file mode 100644 index 000000000000..a8a828864640 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import DatastoreClient +from .async_client import DatastoreAsyncClient + +__all__ = ( + "DatastoreClient", + "DatastoreAsyncClient", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py new file mode 100644 index 000000000000..cc1760e0cca5 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -0,0 +1,671 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datastore_v1.types import datastore +from google.cloud.datastore_v1.types import entity +from google.cloud.datastore_v1.types import query + +from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport +from .client import DatastoreClient + + +class DatastoreAsyncClient: + """Each RPC normalizes the partition IDs of the keys in its + input entities, and always returns entities with keys with + normalized partition IDs. This applies to all keys and entities, + including those in values, except keys with both an empty path + and an empty or unset partition ID. Normalization of input keys + sets the project ID (if not already set) to the project ID from + the request. 
+ """ + + _client: DatastoreClient + + DEFAULT_ENDPOINT = DatastoreClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DatastoreClient.DEFAULT_MTLS_ENDPOINT + + from_service_account_file = DatastoreClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DatastoreClient).get_transport_class, type(DatastoreClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DatastoreTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the datastore client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DatastoreTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = DatastoreClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def lookup( + self, + request: datastore.LookupRequest = None, + *, + project_id: str = None, + read_options: datastore.ReadOptions = None, + keys: Sequence[entity.Key] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.LookupResponse: + r"""Looks up entities by key. + + Args: + request (:class:`~.datastore.LookupRequest`): + The request object. The request for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + read_options (:class:`~.datastore.ReadOptions`): + The options for this lookup request. + This corresponds to the ``read_options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + keys (:class:`Sequence[~.entity.Key]`): + Required. Keys of entities to look + up. 
+ This corresponds to the ``keys`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.LookupResponse: + The response for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project_id, read_options, keys]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore.LookupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if read_options is not None: + request.read_options = read_options + if keys is not None: + request.keys = keys + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lookup, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def run_query( + self, + request: datastore.RunQueryRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RunQueryResponse: + r"""Queries for entities. + + Args: + request (:class:`~.datastore.RunQueryRequest`): + The request object. The request for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RunQueryResponse: + The response for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + """ + # Create or coerce a protobuf request object. + + request = datastore.RunQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_query, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
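With the async surface in place, end-to-end usage looks roughly like the following sketch; it assumes default credentials are available, and the project, kind, and entity names are examples:

    import asyncio

    from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient
    from google.cloud.datastore_v1.types import entity

    async def main():
        client = DatastoreAsyncClient()
        key = entity.Key(
            partition_id=entity.PartitionId(project_id="my-project"),
            path=[entity.Key.PathElement(kind="Task", name="sample-task")],
        )
        response = await client.lookup(project_id="my-project", keys=[key])
        print(len(response.found), "entities found")

    asyncio.run(main())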
+ return response + + async def begin_transaction( + self, + request: datastore.BeginTransactionRequest = None, + *, + project_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.BeginTransactionResponse: + r"""Begins a new transaction. + + Args: + request (:class:`~.datastore.BeginTransactionRequest`): + The request object. The request for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.BeginTransactionResponse: + The response for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project_id]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore.BeginTransactionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.begin_transaction, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def commit( + self, + request: datastore.CommitRequest = None, + *, + project_id: str = None, + mode: datastore.CommitRequest.Mode = None, + transaction: bytes = None, + mutations: Sequence[datastore.Mutation] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.CommitResponse: + r"""Commits a transaction, optionally creating, deleting + or modifying some entities. + + Args: + request (:class:`~.datastore.CommitRequest`): + The request object. The request for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mode (:class:`~.datastore.CommitRequest.Mode`): + The type of commit to perform. Defaults to + ``TRANSACTIONAL``. + This corresponds to the ``mode`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction (:class:`bytes`): + The identifier of the transaction associated with the + commit. A transaction identifier is returned by a call + to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. 
+ This corresponds to the ``transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (:class:`Sequence[~.datastore.Mutation]`): + The mutations to perform. + + When mode is ``TRANSACTIONAL``, mutations affecting a + single entity are applied in order. The following + sequences of mutations affecting a single entity are not + permitted in a single ``Commit`` request: + + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` + + When mode is ``NON_TRANSACTIONAL``, no two mutations may + affect a single entity. + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.CommitResponse: + The response for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project_id, mode, transaction, mutations]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore.CommitRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if mode is not None: + request.mode = mode + if transaction is not None: + request.transaction = transaction + if mutations is not None: + request.mutations = mutations + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def rollback( + self, + request: datastore.RollbackRequest = None, + *, + project_id: str = None, + transaction: bytes = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RollbackResponse: + r"""Rolls back a transaction. + + Args: + request (:class:`~.datastore.RollbackRequest`): + The request object. The request for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction (:class:`bytes`): + Required. The transaction identifier, returned by a call + to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RollbackResponse: + The response for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + (an empty message). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project_id, transaction]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore.RollbackRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if transaction is not None: + request.transaction = transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def allocate_ids( + self, + request: datastore.AllocateIdsRequest = None, + *, + project_id: str = None, + keys: Sequence[entity.Key] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.AllocateIdsResponse: + r"""Allocates IDs for the given keys, which is useful for + referencing an entity before it is inserted. + + Args: + request (:class:`~.datastore.AllocateIdsRequest`): + The request object. The request for + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + keys (:class:`Sequence[~.entity.Key]`): + Required. A list of keys with + incomplete key paths for which to + allocate IDs. No key may be + reserved/read-only. + This corresponds to the ``keys`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.AllocateIdsResponse: + The response for + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project_id, keys]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datastore.AllocateIdsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if keys is not None: + request.keys = keys + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
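+        # Note: AllocateIds is not idempotent, so no default retry predicate
+        # is attached here (unlike lookup, run_query, and reserve_ids).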
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.allocate_ids,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def reserve_ids(
+        self,
+        request: datastore.ReserveIdsRequest = None,
+        *,
+        project_id: str = None,
+        keys: Sequence[entity.Key] = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> datastore.ReserveIdsResponse:
+        r"""Prevents the supplied keys' IDs from being
+        auto-allocated by Cloud Datastore.
+
+        Args:
+            request (:class:`~.datastore.ReserveIdsRequest`):
+                The request object. The request for
+                [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds].
+            project_id (:class:`str`):
+                Required. The ID of the project
+                against which to make the request.
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            keys (:class:`Sequence[~.entity.Key]`):
+                Required. A list of keys with
+                complete key paths whose numeric IDs
+                should not be auto-allocated.
+                This corresponds to the ``keys`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.datastore.ReserveIdsResponse:
+                The response for
+                [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds].
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        if request is not None and any([project_id, keys]):
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = datastore.ReserveIdsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if project_id is not None:
+            request.project_id = project_id
+        if keys is not None:
+            request.keys = keys
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.reserve_ids,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                ),
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DatastoreAsyncClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py new file mode 100644 index 000000000000..5271a96a7874 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -0,0 +1,806 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datastore_v1.types import datastore +from google.cloud.datastore_v1.types import entity +from google.cloud.datastore_v1.types import query + +from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DatastoreGrpcTransport +from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport + + +class DatastoreClientMeta(type): + """Metaclass for the Datastore client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreTransport]] + _transport_registry["grpc"] = DatastoreGrpcTransport + _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[DatastoreTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
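+        # Because _transport_registry is an OrderedDict with "grpc" registered
+        # first, the synchronous gRPC transport is what this returns by default.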
+        return next(iter(cls._transport_registry.values()))
+
+
+class DatastoreClient(metaclass=DatastoreClientMeta):
+    """Each RPC normalizes the partition IDs of the keys in its
+    input entities, and always returns entities with keys with
+    normalized partition IDs. This applies to all keys and entities,
+    including those in values, except keys with both an empty path
+    and an empty or unset partition ID. Normalization of input keys
+    sets the project ID (if not already set) to the project ID from
+    the request.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datastore.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, DatastoreTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the datastore client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.DatastoreTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DatastoreTransport): + # transport is a DatastoreTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def lookup( + self, + request: datastore.LookupRequest = None, + *, + project_id: str = None, + read_options: datastore.ReadOptions = None, + keys: Sequence[entity.Key] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.LookupResponse: + r"""Looks up entities by key. + + Args: + request (:class:`~.datastore.LookupRequest`): + The request object. 
The request for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + read_options (:class:`~.datastore.ReadOptions`): + The options for this lookup request. + This corresponds to the ``read_options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + keys (:class:`Sequence[~.entity.Key]`): + Required. Keys of entities to look + up. + This corresponds to the ``keys`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.LookupResponse: + The response for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, read_options, keys]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datastore.LookupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore.LookupRequest): + request = datastore.LookupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if read_options is not None: + request.read_options = read_options + if keys is not None: + request.keys = keys + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lookup] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def run_query( + self, + request: datastore.RunQueryRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RunQueryResponse: + r"""Queries for entities. + + Args: + request (:class:`~.datastore.RunQueryRequest`): + The request object. The request for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RunQueryResponse: + The response for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a datastore.RunQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
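+        # (A proto-plus request constructor accepts another message, a dict of
+        # fields, or None, so the coercion below covers all accepted inputs.)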
+ if not isinstance(request, datastore.RunQueryRequest): + request = datastore.RunQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_query] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def begin_transaction( + self, + request: datastore.BeginTransactionRequest = None, + *, + project_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.BeginTransactionResponse: + r"""Begins a new transaction. + + Args: + request (:class:`~.datastore.BeginTransactionRequest`): + The request object. The request for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.BeginTransactionResponse: + The response for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datastore.BeginTransactionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore.BeginTransactionRequest): + request = datastore.BeginTransactionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.begin_transaction] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def commit( + self, + request: datastore.CommitRequest = None, + *, + project_id: str = None, + mode: datastore.CommitRequest.Mode = None, + transaction: bytes = None, + mutations: Sequence[datastore.Mutation] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.CommitResponse: + r"""Commits a transaction, optionally creating, deleting + or modifying some entities. + + Args: + request (:class:`~.datastore.CommitRequest`): + The request object. The request for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. 
+ This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mode (:class:`~.datastore.CommitRequest.Mode`): + The type of commit to perform. Defaults to + ``TRANSACTIONAL``. + This corresponds to the ``mode`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction (:class:`bytes`): + The identifier of the transaction associated with the + commit. A transaction identifier is returned by a call + to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (:class:`Sequence[~.datastore.Mutation]`): + The mutations to perform. + + When mode is ``TRANSACTIONAL``, mutations affecting a + single entity are applied in order. The following + sequences of mutations affecting a single entity are not + permitted in a single ``Commit`` request: + + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` + + When mode is ``NON_TRANSACTIONAL``, no two mutations may + affect a single entity. + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.CommitResponse: + The response for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, mode, transaction, mutations]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datastore.CommitRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore.CommitRequest): + request = datastore.CommitRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if mode is not None: + request.mode = mode + if transaction is not None: + request.transaction = transaction + if mutations is not None: + request.mutations = mutations + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def rollback( + self, + request: datastore.RollbackRequest = None, + *, + project_id: str = None, + transaction: bytes = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RollbackResponse: + r"""Rolls back a transaction. 
+ + Args: + request (:class:`~.datastore.RollbackRequest`): + The request object. The request for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction (:class:`bytes`): + Required. The transaction identifier, returned by a call + to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RollbackResponse: + The response for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + (an empty message). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, transaction]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datastore.RollbackRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore.RollbackRequest): + request = datastore.RollbackRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if transaction is not None: + request.transaction = transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def allocate_ids( + self, + request: datastore.AllocateIdsRequest = None, + *, + project_id: str = None, + keys: Sequence[entity.Key] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.AllocateIdsResponse: + r"""Allocates IDs for the given keys, which is useful for + referencing an entity before it is inserted. + + Args: + request (:class:`~.datastore.AllocateIdsRequest`): + The request object. The request for + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + project_id (:class:`str`): + Required. The ID of the project + against which to make the request. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + keys (:class:`Sequence[~.entity.Key]`): + Required. A list of keys with + incomplete key paths for which to + allocate IDs. No key may be + reserved/read-only. + This corresponds to the ``keys`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.datastore.AllocateIdsResponse:
+                The response for
+                [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project_id, keys])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a datastore.AllocateIdsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, datastore.AllocateIdsRequest):
+            request = datastore.AllocateIdsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if project_id is not None:
+            request.project_id = project_id
+        if keys is not None:
+            request.keys = keys
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.allocate_ids]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def reserve_ids(
+        self,
+        request: datastore.ReserveIdsRequest = None,
+        *,
+        project_id: str = None,
+        keys: Sequence[entity.Key] = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> datastore.ReserveIdsResponse:
+        r"""Prevents the supplied keys' IDs from being
+        auto-allocated by Cloud Datastore.
+
+        Args:
+            request (:class:`~.datastore.ReserveIdsRequest`):
+                The request object. The request for
+                [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds].
+            project_id (:class:`str`):
+                Required. The ID of the project
+                against which to make the request.
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            keys (:class:`Sequence[~.entity.Key]`):
+                Required. A list of keys with
+                complete key paths whose numeric IDs
+                should not be auto-allocated.
+                This corresponds to the ``keys`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.datastore.ReserveIdsResponse:
+                The response for
+                [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds].
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project_id, keys])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a datastore.ReserveIdsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore.ReserveIdsRequest): + request = datastore.ReserveIdsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project_id is not None: + request.project_id = project_id + if keys is not None: + request.keys = keys + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reserve_ids] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DatastoreClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py new file mode 100644 index 000000000000..2d0659d9b786 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DatastoreTransport +from .grpc import DatastoreGrpcTransport +from .grpc_asyncio import DatastoreGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreTransport]] +_transport_registry["grpc"] = DatastoreGrpcTransport +_transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport + + +__all__ = ( + "DatastoreTransport", + "DatastoreGrpcTransport", + "DatastoreGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py new file mode 100644 index 000000000000..ad00b33f5f8f --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -0,0 +1,243 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.datastore_v1.types import datastore
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class DatastoreTransport(abc.ABC):
+    """Abstract transport class for Datastore."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/datastore",
+    )
+
+    def __init__(
+        self,
+        *,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Lifted into its own function so it can be stubbed out during tests.
+        self._prep_wrapped_messages(client_info)
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
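+        # Only the idempotent RPCs (lookup, run_query, reserve_ids) carry a
+        # default retry on DeadlineExceeded/ServiceUnavailable; mutating RPCs
+        # such as commit and rollback are wrapped with a timeout only.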
+ self._wrapped_methods = { + self.lookup: gapic_v1.method.wrap_method( + self.lookup, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_query: gapic_v1.method.wrap_method( + self.run_query, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method.wrap_method( + self.begin_transaction, default_timeout=60.0, client_info=client_info, + ), + self.commit: gapic_v1.method.wrap_method( + self.commit, default_timeout=60.0, client_info=client_info, + ), + self.rollback: gapic_v1.method.wrap_method( + self.rollback, default_timeout=60.0, client_info=client_info, + ), + self.allocate_ids: gapic_v1.method.wrap_method( + self.allocate_ids, default_timeout=60.0, client_info=client_info, + ), + self.reserve_ids: gapic_v1.method.wrap_method( + self.reserve_ids, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def lookup( + self, + ) -> typing.Callable[ + [datastore.LookupRequest], + typing.Union[ + datastore.LookupResponse, typing.Awaitable[datastore.LookupResponse] + ], + ]: + raise NotImplementedError() + + @property + def run_query( + self, + ) -> typing.Callable[ + [datastore.RunQueryRequest], + typing.Union[ + datastore.RunQueryResponse, typing.Awaitable[datastore.RunQueryResponse] + ], + ]: + raise NotImplementedError() + + @property + def begin_transaction( + self, + ) -> typing.Callable[ + [datastore.BeginTransactionRequest], + typing.Union[ + datastore.BeginTransactionResponse, + typing.Awaitable[datastore.BeginTransactionResponse], + ], + ]: + raise NotImplementedError() + + @property + def commit( + self, + ) -> typing.Callable[ + [datastore.CommitRequest], + typing.Union[ + datastore.CommitResponse, typing.Awaitable[datastore.CommitResponse] + ], + ]: + raise NotImplementedError() + + @property + def rollback( + self, + ) -> typing.Callable[ + [datastore.RollbackRequest], + typing.Union[ + datastore.RollbackResponse, typing.Awaitable[datastore.RollbackResponse] + ], + ]: + raise NotImplementedError() + + @property + def allocate_ids( + self, + ) -> typing.Callable[ + [datastore.AllocateIdsRequest], + typing.Union[ + datastore.AllocateIdsResponse, + typing.Awaitable[datastore.AllocateIdsResponse], + ], + ]: + raise NotImplementedError() + + @property + def reserve_ids( + self, + ) -> typing.Callable[ + [datastore.ReserveIdsRequest], + typing.Union[ + datastore.ReserveIdsResponse, typing.Awaitable[datastore.ReserveIdsResponse] + ], + ]: + raise NotImplementedError() + + +__all__ = ("DatastoreTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py new file mode 100644 index 000000000000..f8f18768f380 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -0,0 +1,422 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.datastore_v1.types import datastore
+
+from .base import DatastoreTransport, DEFAULT_CLIENT_INFO
+
+
+class DatastoreGrpcTransport(DatastoreTransport):
+    """gRPC backend transport for Datastore.
+
+    Each RPC normalizes the partition IDs of the keys in its
+    input entities, and always returns entities with keys with
+    normalized partition IDs. This applies to all keys and entities,
+    including those in values, except keys with both an empty path
+    and an empty or unset partition ID. Normalization of input keys
+    sets the project ID (if not already set) to the project ID from
+    the request.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated.
+                A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "datastore.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def lookup(self) -> Callable[[datastore.LookupRequest], datastore.LookupResponse]: + r"""Return a callable for the lookup method over gRPC. + + Looks up entities by key. + + Returns: + Callable[[~.LookupRequest], + ~.LookupResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lookup" not in self._stubs: + self._stubs["lookup"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/Lookup", + request_serializer=datastore.LookupRequest.serialize, + response_deserializer=datastore.LookupResponse.deserialize, + ) + return self._stubs["lookup"] + + @property + def run_query( + self, + ) -> Callable[[datastore.RunQueryRequest], datastore.RunQueryResponse]: + r"""Return a callable for the run query method over gRPC. + + Queries for entities. + + Returns: + Callable[[~.RunQueryRequest], + ~.RunQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_query" not in self._stubs: + self._stubs["run_query"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/RunQuery", + request_serializer=datastore.RunQueryRequest.serialize, + response_deserializer=datastore.RunQueryResponse.deserialize, + ) + return self._stubs["run_query"] + + @property + def begin_transaction( + self, + ) -> Callable[ + [datastore.BeginTransactionRequest], datastore.BeginTransactionResponse + ]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. + + Returns: + Callable[[~.BeginTransactionRequest], + ~.BeginTransactionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
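+ # Illustrative sketch (not part of the generated code): each property on
+ # this transport returns a plain callable, so hypothetical direct usage
+ # looks like the following, where "my-project" is a placeholder:
+ #
+ #   transport = DatastoreGrpcTransport()  # credentials resolved from env
+ #   response = transport.begin_transaction(
+ #       datastore.BeginTransactionRequest(project_id="my-project")
+ #   )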
+ if "begin_transaction" not in self._stubs: + self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/BeginTransaction", + request_serializer=datastore.BeginTransactionRequest.serialize, + response_deserializer=datastore.BeginTransactionResponse.deserialize, + ) + return self._stubs["begin_transaction"] + + @property + def commit(self) -> Callable[[datastore.CommitRequest], datastore.CommitResponse]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction, optionally creating, deleting + or modifying some entities. + + Returns: + Callable[[~.CommitRequest], + ~.CommitResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit" not in self._stubs: + self._stubs["commit"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/Commit", + request_serializer=datastore.CommitRequest.serialize, + response_deserializer=datastore.CommitResponse.deserialize, + ) + return self._stubs["commit"] + + @property + def rollback( + self, + ) -> Callable[[datastore.RollbackRequest], datastore.RollbackResponse]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction. + + Returns: + Callable[[~.RollbackRequest], + ~.RollbackResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback" not in self._stubs: + self._stubs["rollback"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/Rollback", + request_serializer=datastore.RollbackRequest.serialize, + response_deserializer=datastore.RollbackResponse.deserialize, + ) + return self._stubs["rollback"] + + @property + def allocate_ids( + self, + ) -> Callable[[datastore.AllocateIdsRequest], datastore.AllocateIdsResponse]: + r"""Return a callable for the allocate ids method over gRPC. + + Allocates IDs for the given keys, which is useful for + referencing an entity before it is inserted. + + Returns: + Callable[[~.AllocateIdsRequest], + ~.AllocateIdsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "allocate_ids" not in self._stubs: + self._stubs["allocate_ids"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/AllocateIds", + request_serializer=datastore.AllocateIdsRequest.serialize, + response_deserializer=datastore.AllocateIdsResponse.deserialize, + ) + return self._stubs["allocate_ids"] + + @property + def reserve_ids( + self, + ) -> Callable[[datastore.ReserveIdsRequest], datastore.ReserveIdsResponse]: + r"""Return a callable for the reserve ids method over gRPC. + + Prevents the supplied keys' IDs from being auto- + llocated by Cloud Datastore. + + Returns: + Callable[[~.ReserveIdsRequest], + ~.ReserveIdsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reserve_ids" not in self._stubs: + self._stubs["reserve_ids"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/ReserveIds", + request_serializer=datastore.ReserveIdsRequest.serialize, + response_deserializer=datastore.ReserveIdsResponse.deserialize, + ) + return self._stubs["reserve_ids"] + + +__all__ = ("DatastoreGrpcTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a9c5611f8c28 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datastore_v1.types import datastore + +from .base import DatastoreTransport, DEFAULT_CLIENT_INFO +from .grpc import DatastoreGrpcTransport + + +class DatastoreGrpcAsyncIOTransport(DatastoreTransport): + """gRPC AsyncIO backend transport for Datastore. + + Each RPC normalizes the partition IDs of the keys in its + input entities, and always returns entities with keys with + normalized partition IDs. This applies to all keys and entities, + including those in values, except keys with both an empty path + and an empty or unset partition ID. Normalization of input keys + sets the project ID (if not already set) to the project ID from + the request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "datastore.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service.
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "datastore.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def lookup( + self, + ) -> Callable[[datastore.LookupRequest], Awaitable[datastore.LookupResponse]]: + r"""Return a callable for the lookup method over gRPC. + + Looks up entities by key. + + Returns: + Callable[[~.LookupRequest], + Awaitable[~.LookupResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lookup" not in self._stubs: + self._stubs["lookup"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/Lookup", + request_serializer=datastore.LookupRequest.serialize, + response_deserializer=datastore.LookupResponse.deserialize, + ) + return self._stubs["lookup"] + + @property + def run_query( + self, + ) -> Callable[[datastore.RunQueryRequest], Awaitable[datastore.RunQueryResponse]]: + r"""Return a callable for the run query method over gRPC. + + Queries for entities. + + Returns: + Callable[[~.RunQueryRequest], + Awaitable[~.RunQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
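+ 
+ Example (illustrative sketch only; ``"my-project"`` is a placeholder
+ and the call must be made from within a coroutine)::
+ 
+     transport = DatastoreGrpcAsyncIOTransport()
+     response = await transport.run_query(
+         datastore.RunQueryRequest(project_id="my-project")
+     )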
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_query" not in self._stubs: + self._stubs["run_query"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/RunQuery", + request_serializer=datastore.RunQueryRequest.serialize, + response_deserializer=datastore.RunQueryResponse.deserialize, + ) + return self._stubs["run_query"] + + @property + def begin_transaction( + self, + ) -> Callable[ + [datastore.BeginTransactionRequest], + Awaitable[datastore.BeginTransactionResponse], + ]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. + + Returns: + Callable[[~.BeginTransactionRequest], + Awaitable[~.BeginTransactionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "begin_transaction" not in self._stubs: + self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/BeginTransaction", + request_serializer=datastore.BeginTransactionRequest.serialize, + response_deserializer=datastore.BeginTransactionResponse.deserialize, + ) + return self._stubs["begin_transaction"] + + @property + def commit( + self, + ) -> Callable[[datastore.CommitRequest], Awaitable[datastore.CommitResponse]]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction, optionally creating, deleting + or modifying some entities. + + Returns: + Callable[[~.CommitRequest], + Awaitable[~.CommitResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit" not in self._stubs: + self._stubs["commit"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/Commit", + request_serializer=datastore.CommitRequest.serialize, + response_deserializer=datastore.CommitResponse.deserialize, + ) + return self._stubs["commit"] + + @property + def rollback( + self, + ) -> Callable[[datastore.RollbackRequest], Awaitable[datastore.RollbackResponse]]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction. + + Returns: + Callable[[~.RollbackRequest], + Awaitable[~.RollbackResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback" not in self._stubs: + self._stubs["rollback"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/Rollback", + request_serializer=datastore.RollbackRequest.serialize, + response_deserializer=datastore.RollbackResponse.deserialize, + ) + return self._stubs["rollback"] + + @property + def allocate_ids( + self, + ) -> Callable[ + [datastore.AllocateIdsRequest], Awaitable[datastore.AllocateIdsResponse] + ]: + r"""Return a callable for the allocate ids method over gRPC. + + Allocates IDs for the given keys, which is useful for + referencing an entity before it is inserted. 
+ + Returns: + Callable[[~.AllocateIdsRequest], + Awaitable[~.AllocateIdsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "allocate_ids" not in self._stubs: + self._stubs["allocate_ids"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/AllocateIds", + request_serializer=datastore.AllocateIdsRequest.serialize, + response_deserializer=datastore.AllocateIdsResponse.deserialize, + ) + return self._stubs["allocate_ids"] + + @property + def reserve_ids( + self, + ) -> Callable[ + [datastore.ReserveIdsRequest], Awaitable[datastore.ReserveIdsResponse] + ]: + r"""Return a callable for the reserve ids method over gRPC. + + Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. + + Returns: + Callable[[~.ReserveIdsRequest], + Awaitable[~.ReserveIdsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reserve_ids" not in self._stubs: + self._stubs["reserve_ids"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/ReserveIds", + request_serializer=datastore.ReserveIdsRequest.serialize, + response_deserializer=datastore.ReserveIdsResponse.deserialize, + ) + return self._stubs["reserve_ids"] + + +__all__ = ("DatastoreGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py deleted file mode 100644 index ac154bda0e67..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -from __future__ import absolute_import -import sys - -from google.api import http_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.protobuf import wrappers_pb2 -from google.type import latlng_pb2 - -from google.api_core.protobuf_helpers import get_messages -from google.cloud.datastore_v1.proto import datastore_pb2 -from google.cloud.datastore_v1.proto import entity_pb2 -from google.cloud.datastore_v1.proto import query_pb2 - - -_shared_modules = [ - http_pb2, - descriptor_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - latlng_pb2, -] - -_local_modules = [datastore_pb2, entity_pb2, query_pb2] - -names = [] - -for module in _shared_modules: - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) - -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.datastore_v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - -__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py new file mode 100644 index 000000000000..2148caa00287 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .entity import ( + PartitionId, + Key, + ArrayValue, + Value, + Entity, +) +from .query import ( + EntityResult, + Query, + KindExpression, + PropertyReference, + Projection, + PropertyOrder, + Filter, + CompositeFilter, + PropertyFilter, + GqlQuery, + GqlQueryParameter, + QueryResultBatch, +) +from .datastore import ( + LookupRequest, + LookupResponse, + RunQueryRequest, + RunQueryResponse, + BeginTransactionRequest, + BeginTransactionResponse, + RollbackRequest, + RollbackResponse, + CommitRequest, + CommitResponse, + AllocateIdsRequest, + AllocateIdsResponse, + ReserveIdsRequest, + ReserveIdsResponse, + Mutation, + MutationResult, + ReadOptions, + TransactionOptions, +) + + +__all__ = ( + "PartitionId", + "Key", + "ArrayValue", + "Value", + "Entity", + "EntityResult", + "Query", + "KindExpression", + "PropertyReference", + "Projection", + "PropertyOrder", + "Filter", + "CompositeFilter", + "PropertyFilter", + "GqlQuery", + "GqlQueryParameter", + "QueryResultBatch", + "LookupRequest", + "LookupResponse", + "RunQueryRequest", + "RunQueryResponse", + "BeginTransactionRequest", + "BeginTransactionResponse", + "RollbackRequest", + "RollbackResponse", + "CommitRequest", + "CommitResponse", + "AllocateIdsRequest", + "AllocateIdsResponse", + "ReserveIdsRequest", + "ReserveIdsResponse", + "Mutation", + "MutationResult", + "ReadOptions", + "TransactionOptions", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py new file mode 100644 index 000000000000..e1124457e707 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -0,0 +1,480 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datastore_v1.types import entity +from google.cloud.datastore_v1.types import query as gd_query + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "LookupRequest", + "LookupResponse", + "RunQueryRequest", + "RunQueryResponse", + "BeginTransactionRequest", + "BeginTransactionResponse", + "RollbackRequest", + "RollbackResponse", + "CommitRequest", + "CommitResponse", + "AllocateIdsRequest", + "AllocateIdsResponse", + "ReserveIdsRequest", + "ReserveIdsResponse", + "Mutation", + "MutationResult", + "ReadOptions", + "TransactionOptions", + }, +) + + +class LookupRequest(proto.Message): + r"""The request for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + read_options (~.datastore.ReadOptions): + The options for this lookup request. + keys (Sequence[~.entity.Key]): + Required. Keys of entities to look up. 
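+ 
+ Example (illustrative only; the project ID and key path below are
+ placeholders, not values taken from this codebase)::
+ 
+     from google.cloud.datastore_v1.types import datastore, entity
+ 
+     request = datastore.LookupRequest(
+         project_id="my-project",
+         keys=[entity.Key(path=[entity.Key.PathElement(kind="Task", id=1)])],
+     )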
+ """ + + project_id = proto.Field(proto.STRING, number=8) + + read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) + + keys = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) + + +class LookupResponse(proto.Message): + r"""The response for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + + Attributes: + found (Sequence[~.gd_query.EntityResult]): + Entities found as ``ResultType.FULL`` entities. The order of + results in this field is undefined and has no relation to + the order of the keys in the input. + missing (Sequence[~.gd_query.EntityResult]): + Entities not found as ``ResultType.KEY_ONLY`` entities. The + order of results in this field is undefined and has no + relation to the order of the keys in the input. + deferred (Sequence[~.entity.Key]): + A list of keys that were not looked up due to + resource constraints. The order of results in + this field is undefined and has no relation to + the order of the keys in the input. + """ + + found = proto.RepeatedField(proto.MESSAGE, number=1, message=gd_query.EntityResult,) + + missing = proto.RepeatedField( + proto.MESSAGE, number=2, message=gd_query.EntityResult, + ) + + deferred = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) + + +class RunQueryRequest(proto.Message): + r"""The request for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + partition_id (~.entity.PartitionId): + Entities are partitioned into subsets, + identified by a partition ID. Queries are scoped + to a single partition. This partition ID is + normalized with the standard default context + partition ID. + read_options (~.datastore.ReadOptions): + The options for this query. + query (~.gd_query.Query): + The query to run. + gql_query (~.gd_query.GqlQuery): + The GQL query to run. + """ + + project_id = proto.Field(proto.STRING, number=8) + + partition_id = proto.Field(proto.MESSAGE, number=2, message=entity.PartitionId,) + + read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) + + query = proto.Field( + proto.MESSAGE, number=3, oneof="query_type", message=gd_query.Query, + ) + + gql_query = proto.Field( + proto.MESSAGE, number=7, oneof="query_type", message=gd_query.GqlQuery, + ) + + +class RunQueryResponse(proto.Message): + r"""The response for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + Attributes: + batch (~.gd_query.QueryResultBatch): + A batch of query results (always present). + query (~.gd_query.Query): + The parsed form of the ``GqlQuery`` from the request, if it + was set. + """ + + batch = proto.Field(proto.MESSAGE, number=1, message=gd_query.QueryResultBatch,) + + query = proto.Field(proto.MESSAGE, number=2, message=gd_query.Query,) + + +class BeginTransactionRequest(proto.Message): + r"""The request for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + transaction_options (~.datastore.TransactionOptions): + Options for a new transaction. + """ + + project_id = proto.Field(proto.STRING, number=8) + + transaction_options = proto.Field( + proto.MESSAGE, number=10, message="TransactionOptions", + ) + + +class BeginTransactionResponse(proto.Message): + r"""The response for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. 
+ + Attributes: + transaction (bytes): + The transaction identifier (always present). + """ + + transaction = proto.Field(proto.BYTES, number=1) + + +class RollbackRequest(proto.Message): + r"""The request for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + transaction (bytes): + Required. The transaction identifier, returned by a call to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + """ + + project_id = proto.Field(proto.STRING, number=8) + + transaction = proto.Field(proto.BYTES, number=1) + + +class RollbackResponse(proto.Message): + r"""The response for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an + empty message). + """ + + +class CommitRequest(proto.Message): + r"""The request for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + mode (~.datastore.CommitRequest.Mode): + The type of commit to perform. Defaults to + ``TRANSACTIONAL``. + transaction (bytes): + The identifier of the transaction associated with the + commit. A transaction identifier is returned by a call to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + mutations (Sequence[~.datastore.Mutation]): + The mutations to perform. + + When mode is ``TRANSACTIONAL``, mutations affecting a single + entity are applied in order. The following sequences of + mutations affecting a single entity are not permitted in a + single ``Commit`` request: + + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` + + When mode is ``NON_TRANSACTIONAL``, no two mutations may + affect a single entity. + """ + + class Mode(proto.Enum): + r"""The modes available for commits.""" + MODE_UNSPECIFIED = 0 + TRANSACTIONAL = 1 + NON_TRANSACTIONAL = 2 + + project_id = proto.Field(proto.STRING, number=8) + + mode = proto.Field(proto.ENUM, number=5, enum=Mode,) + + transaction = proto.Field(proto.BYTES, number=1, oneof="transaction_selector") + + mutations = proto.RepeatedField(proto.MESSAGE, number=6, message="Mutation",) + + +class CommitResponse(proto.Message): + r"""The response for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + Attributes: + mutation_results (Sequence[~.datastore.MutationResult]): + The result of performing the mutations. + The i-th mutation result corresponds to the i-th + mutation in the request. + index_updates (int): + The number of index entries updated during + the commit, or zero if none were updated. + """ + + mutation_results = proto.RepeatedField( + proto.MESSAGE, number=3, message="MutationResult", + ) + + index_updates = proto.Field(proto.INT32, number=4) + + +class AllocateIdsRequest(proto.Message): + r"""The request for + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + keys (Sequence[~.entity.Key]): + Required. A list of keys with incomplete key + paths for which to allocate IDs. No key may be + reserved/read-only. 
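+ 
+ Example (illustrative only; an incomplete key omits the final ID or
+ name, and ``"Task"`` is a placeholder kind)::
+ 
+     request = datastore.AllocateIdsRequest(
+         project_id="my-project",
+         keys=[entity.Key(path=[entity.Key.PathElement(kind="Task")])],
+     )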
+ """ + + project_id = proto.Field(proto.STRING, number=8) + + keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + + +class AllocateIdsResponse(proto.Message): + r"""The response for + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + + Attributes: + keys (Sequence[~.entity.Key]): + The keys specified in the request (in the + same order), each with its key path completed + with a newly allocated ID. + """ + + keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + + +class ReserveIdsRequest(proto.Message): + r"""The request for + [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + database_id (str): + If not empty, the ID of the database against + which to make the request. + keys (Sequence[~.entity.Key]): + Required. A list of keys with complete key + paths whose numeric IDs should not be auto- + allocated. + """ + + project_id = proto.Field(proto.STRING, number=8) + + database_id = proto.Field(proto.STRING, number=9) + + keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + + +class ReserveIdsResponse(proto.Message): + r"""The response for + [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + """ + + +class Mutation(proto.Message): + r"""A mutation to apply to an entity. + + Attributes: + insert (~.entity.Entity): + The entity to insert. The entity must not + already exist. The entity key's final path + element may be incomplete. + update (~.entity.Entity): + The entity to update. The entity must already + exist. Must have a complete key path. + upsert (~.entity.Entity): + The entity to upsert. The entity may or may + not already exist. The entity key's final path + element may be incomplete. + delete (~.entity.Key): + The key of the entity to delete. The entity + may or may not already exist. Must have a + complete key path and must not be reserved/read- + only. + base_version (int): + The version of the entity that this mutation + is being applied to. If this does not match the + current version on the server, the mutation + conflicts. + """ + + insert = proto.Field( + proto.MESSAGE, number=4, oneof="operation", message=entity.Entity, + ) + + update = proto.Field( + proto.MESSAGE, number=5, oneof="operation", message=entity.Entity, + ) + + upsert = proto.Field( + proto.MESSAGE, number=6, oneof="operation", message=entity.Entity, + ) + + delete = proto.Field( + proto.MESSAGE, number=7, oneof="operation", message=entity.Key, + ) + + base_version = proto.Field( + proto.INT64, number=8, oneof="conflict_detection_strategy" + ) + + +class MutationResult(proto.Message): + r"""The result of applying a mutation. + + Attributes: + key (~.entity.Key): + The automatically allocated key. + Set only when the mutation allocated a key. + version (int): + The version of the entity on the server after + processing the mutation. If the mutation doesn't + change anything on the server, then the version + will be the version of the current entity or, if + no entity is present, a version that is strictly + greater than the version of any previous entity + and less than the version of any possible future + entity. + conflict_detected (bool): + Whether a conflict was detected for this + mutation. Always false when a conflict detection + strategy field is not set in the mutation. 
+ """ + + key = proto.Field(proto.MESSAGE, number=3, message=entity.Key,) + + version = proto.Field(proto.INT64, number=4) + + conflict_detected = proto.Field(proto.BOOL, number=5) + + +class ReadOptions(proto.Message): + r"""The options shared by read requests. + + Attributes: + read_consistency (~.datastore.ReadOptions.ReadConsistency): + The non-transactional read consistency to use. Cannot be set + to ``STRONG`` for global queries. + transaction (bytes): + The identifier of the transaction in which to read. A + transaction identifier is returned by a call to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + """ + + class ReadConsistency(proto.Enum): + r"""The possible values for read consistencies.""" + READ_CONSISTENCY_UNSPECIFIED = 0 + STRONG = 1 + EVENTUAL = 2 + + read_consistency = proto.Field( + proto.ENUM, number=1, oneof="consistency_type", enum=ReadConsistency, + ) + + transaction = proto.Field(proto.BYTES, number=2, oneof="consistency_type") + + +class TransactionOptions(proto.Message): + r"""Options for beginning a new transaction. + + Transactions can be created explicitly with calls to + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] + or implicitly by setting + [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] + in read requests. + + Attributes: + read_write (~.datastore.TransactionOptions.ReadWrite): + The transaction should allow both reads and + writes. + read_only (~.datastore.TransactionOptions.ReadOnly): + The transaction should only allow reads. + """ + + class ReadWrite(proto.Message): + r"""Options specific to read / write transactions. + + Attributes: + previous_transaction (bytes): + The transaction identifier of the transaction + being retried. + """ + + previous_transaction = proto.Field(proto.BYTES, number=1) + + class ReadOnly(proto.Message): + r"""Options specific to read-only transactions.""" + + read_write = proto.Field(proto.MESSAGE, number=1, oneof="mode", message=ReadWrite,) + + read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py new file mode 100644 index 000000000000..96d4a6f49ddb --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.type import latlng_pb2 as latlng # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={"PartitionId", "Key", "ArrayValue", "Value", "Entity",}, +) + + +class PartitionId(proto.Message): + r"""A partition ID identifies a grouping of entities. The grouping is + always by project and namespace, however the namespace ID may be + empty. + + A partition ID contains several dimensions: project ID and namespace + ID. + + Partition dimensions: + + - May be ``""``. + - Must be valid UTF-8 bytes. + - Must have values that match regex ``[A-Za-z\d\.\-_]{1,100}``. If + the value of any dimension matches regex ``__.*__``, the + partition is reserved/read-only. A reserved/read-only partition + ID is forbidden in certain documented contexts. + + Foreign partition IDs (in which the project ID does not match the + context project ID) are discouraged. Reads and writes of foreign + partition IDs may fail if the project is not in an active state. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id = proto.Field(proto.STRING, number=2) + + namespace_id = proto.Field(proto.STRING, number=4) + + +class Key(proto.Message): + r"""A unique identifier for an entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (~.entity.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (Sequence[~.entity.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancestors*. + + An entity path is always fully complete: *all* of the + entity's ancestors are required to be in the path along with + the entity identifier itself. The only exception is that in + some documented cases, the identifier in the last path + element (for the entity) itself may be omitted. For example, + the last path element of the key of ``Mutation.insert`` may + have no identifier. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. + name (str): + The name of the entity. A name matching regex ``__.*__`` is + reserved/read-only.
A name must not be more than 1500 bytes + when UTF-8 encoded. Cannot be ``""``. + """ + + kind = proto.Field(proto.STRING, number=1) + + id = proto.Field(proto.INT64, number=2, oneof="id_type") + + name = proto.Field(proto.STRING, number=3, oneof="id_type") + + partition_id = proto.Field(proto.MESSAGE, number=1, message=PartitionId,) + + path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) + + +class ArrayValue(proto.Message): + r"""An array value. + + Attributes: + values (Sequence[~.entity.Value]): + Values in the array. The order of values in an array is + preserved as long as all values have identical settings for + 'exclude_from_indexes'. + """ + + values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",) + + +class Value(proto.Message): + r"""A message that can hold any of the supported value types and + associated metadata. + + Attributes: + null_value (~.struct.NullValue): + A null value. + boolean_value (bool): + A boolean value. + integer_value (int): + An integer value. + double_value (float): + A double value. + timestamp_value (~.timestamp.Timestamp): + A timestamp value. + When stored in the Datastore, precise only to + microseconds; any additional precision is + rounded down. + key_value (~.entity.Key): + A key value. + string_value (str): + A UTF-8 encoded string value. When ``exclude_from_indexes`` + is false (it is indexed), may have at most 1500 bytes. + Otherwise, may be set to at most 1,000,000 bytes. + blob_value (bytes): + A blob value. May have at most 1,000,000 bytes. When + ``exclude_from_indexes`` is false, may have at most 1500 + bytes. In JSON requests, must be base64-encoded. + geo_point_value (~.latlng.LatLng): + A geo point value representing a point on the + surface of Earth. + entity_value (~.entity.Entity): + An entity value. + - May have no key. + - May have a key with an incomplete key path. + - May have a reserved/read-only key. + array_value (~.entity.ArrayValue): + An array value. Cannot contain another array value. A + ``Value`` instance that sets field ``array_value`` must not + set fields ``meaning`` or ``exclude_from_indexes``. + meaning (int): + The ``meaning`` field should only be populated for backwards + compatibility. + exclude_from_indexes (bool): + If the value should be excluded from all + indexes including those defined explicitly. + """ + + null_value = proto.Field( + proto.ENUM, number=11, oneof="value_type", enum=struct.NullValue, + ) + + boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type") + + integer_value = proto.Field(proto.INT64, number=2, oneof="value_type") + + double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type") + + timestamp_value = proto.Field( + proto.MESSAGE, number=10, oneof="value_type", message=timestamp.Timestamp, + ) + + key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message=Key,) + + string_value = proto.Field(proto.STRING, number=17, oneof="value_type") + + blob_value = proto.Field(proto.BYTES, number=18, oneof="value_type") + + geo_point_value = proto.Field( + proto.MESSAGE, number=8, oneof="value_type", message=latlng.LatLng, + ) + + entity_value = proto.Field( + proto.MESSAGE, number=6, oneof="value_type", message="Entity", + ) + + array_value = proto.Field( + proto.MESSAGE, number=9, oneof="value_type", message=ArrayValue, + ) + + meaning = proto.Field(proto.INT32, number=14) + + exclude_from_indexes = proto.Field(proto.BOOL, number=19) + + +class Entity(proto.Message): + r"""A Datastore data object.
+ + An entity is limited to 1 megabyte when stored. That *roughly* + corresponds to a limit of 1 megabyte for the serialized form of this + message. + + Attributes: + key (~.entity.Key): + The entity's key. + + An entity must have a key, unless otherwise documented (for + example, an entity in ``Value.entity_value`` may have no + key). An entity's kind is its key path's last element's + kind, or null if it has no key. + properties (Sequence[~.entity.Entity.PropertiesEntry]): + The entity's properties. The map's keys are property names. + A property name matching regex ``__.*__`` is reserved. A + reserved property name is forbidden in certain documented + contexts. The name must not contain more than 500 + characters. The name cannot be ``""``. + """ + + key = proto.Field(proto.MESSAGE, number=1, message=Key,) + + properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message=Value,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py new file mode 100644 index 000000000000..87ed724d1b34 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -0,0 +1,397 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datastore_v1.types import entity as gd_entity +from google.protobuf import wrappers_pb2 as wrappers # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "EntityResult", + "Query", + "KindExpression", + "PropertyReference", + "Projection", + "PropertyOrder", + "Filter", + "CompositeFilter", + "PropertyFilter", + "GqlQuery", + "GqlQueryParameter", + "QueryResultBatch", + }, +) + + +class EntityResult(proto.Message): + r"""The result of fetching an entity from Datastore. + + Attributes: + entity (~.gd_entity.Entity): + The resulting entity. + version (int): + The version of the entity, a strictly positive number that + monotonically increases with changes to the entity. + + This field is set for + [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] + entity results. + + For [missing][google.datastore.v1.LookupResponse.missing] + entities in ``LookupResponse``, this is the version of the + snapshot that was used to look up the entity, and it is + always set except for eventually consistent reads. + cursor (bytes): + A cursor that points to the position after the result + entity. Set only when the ``EntityResult`` is part of a + ``QueryResultBatch`` message. + """ + + class ResultType(proto.Enum): + r"""Specifies what data the 'entity' field contains. 
A ``ResultType`` is + either implied (for example, in ``LookupResponse.missing`` from + ``datastore.proto``, it is always ``KEY_ONLY``) or specified by + context (for example, in message ``QueryResultBatch``, field + ``entity_result_type`` specifies a ``ResultType`` for all the values + in field ``entity_results``). + """ + RESULT_TYPE_UNSPECIFIED = 0 + FULL = 1 + PROJECTION = 2 + KEY_ONLY = 3 + + entity = proto.Field(proto.MESSAGE, number=1, message=gd_entity.Entity,) + + version = proto.Field(proto.INT64, number=4) + + cursor = proto.Field(proto.BYTES, number=3) + + +class Query(proto.Message): + r"""A query for entities. + + Attributes: + projection (Sequence[~.query.Projection]): + The projection to return. Defaults to + returning all properties. + kind (Sequence[~.query.KindExpression]): + The kinds to query (if empty, returns + entities of all kinds). Currently at most 1 kind + may be specified. + filter (~.query.Filter): + The filter to apply. + order (Sequence[~.query.PropertyOrder]): + The order to apply to the query results (if + empty, order is unspecified). + distinct_on (Sequence[~.query.PropertyReference]): + The properties to make distinct. The query + results will contain the first result for each + distinct combination of values for the given + properties (if empty, all results are returned). + start_cursor (bytes): + A starting point for the query results. Query cursors are + returned in query result batches and `can only be used to + continue the same + query `__. + end_cursor (bytes): + An ending point for the query results. Query cursors are + returned in query result batches and `can only be used to + limit the same + query `__. + offset (int): + The number of results to skip. Applies before + limit, but after all other constraints. + Optional. Must be >= 0 if specified. + limit (~.wrappers.Int32Value): + The maximum number of results to return. + Applies after all other constraints. Optional. + Unspecified is interpreted as no limit. + Must be >= 0 if specified. + """ + + projection = proto.RepeatedField(proto.MESSAGE, number=2, message="Projection",) + + kind = proto.RepeatedField(proto.MESSAGE, number=3, message="KindExpression",) + + filter = proto.Field(proto.MESSAGE, number=4, message="Filter",) + + order = proto.RepeatedField(proto.MESSAGE, number=5, message="PropertyOrder",) + + distinct_on = proto.RepeatedField( + proto.MESSAGE, number=6, message="PropertyReference", + ) + + start_cursor = proto.Field(proto.BYTES, number=7) + + end_cursor = proto.Field(proto.BYTES, number=8) + + offset = proto.Field(proto.INT32, number=10) + + limit = proto.Field(proto.MESSAGE, number=12, message=wrappers.Int32Value,) + + +class KindExpression(proto.Message): + r"""A representation of a kind. + + Attributes: + name (str): + The name of the kind. + """ + + name = proto.Field(proto.STRING, number=1) + + +class PropertyReference(proto.Message): + r"""A reference to a property relative to the kind expressions. + + Attributes: + name (str): + The name of the property. + If name includes "."s, it may be interpreted as + a property name path. + """ + + name = proto.Field(proto.STRING, number=2) + + +class Projection(proto.Message): + r"""A representation of a property in a projection. + + Attributes: + property (~.query.PropertyReference): + The property to project. + """ + + property = proto.Field(proto.MESSAGE, number=1, message=PropertyReference,) + + +class PropertyOrder(proto.Message): + r"""The desired order for a specific property. 
+ + Attributes: + property (~.query.PropertyReference): + The property to order by. + direction (~.query.PropertyOrder.Direction): + The direction to order by. Defaults to ``ASCENDING``. + """ + + class Direction(proto.Enum): + r"""The sort direction.""" + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + property = proto.Field(proto.MESSAGE, number=1, message=PropertyReference,) + + direction = proto.Field(proto.ENUM, number=2, enum=Direction,) + + +class Filter(proto.Message): + r"""A holder for any type of filter. + + Attributes: + composite_filter (~.query.CompositeFilter): + A composite filter. + property_filter (~.query.PropertyFilter): + A filter on a property. + """ + + composite_filter = proto.Field( + proto.MESSAGE, number=1, oneof="filter_type", message="CompositeFilter", + ) + + property_filter = proto.Field( + proto.MESSAGE, number=2, oneof="filter_type", message="PropertyFilter", + ) + + +class CompositeFilter(proto.Message): + r"""A filter that merges multiple other filters using the given + operator. + + Attributes: + op (~.query.CompositeFilter.Operator): + The operator for combining multiple filters. + filters (Sequence[~.query.Filter]): + The list of filters to combine. + Must contain at least one filter. + """ + + class Operator(proto.Enum): + r"""A composite filter operator.""" + OPERATOR_UNSPECIFIED = 0 + AND = 1 + + op = proto.Field(proto.ENUM, number=1, enum=Operator,) + + filters = proto.RepeatedField(proto.MESSAGE, number=2, message=Filter,) + + +class PropertyFilter(proto.Message): + r"""A filter on a specific property. + + Attributes: + property (~.query.PropertyReference): + The property to filter by. + op (~.query.PropertyFilter.Operator): + The operator to filter by. + value (~.gd_entity.Value): + The value to compare the property to. + """ + + class Operator(proto.Enum): + r"""A property filter operator.""" + OPERATOR_UNSPECIFIED = 0 + LESS_THAN = 1 + LESS_THAN_OR_EQUAL = 2 + GREATER_THAN = 3 + GREATER_THAN_OR_EQUAL = 4 + EQUAL = 5 + HAS_ANCESTOR = 11 + + property = proto.Field(proto.MESSAGE, number=1, message=PropertyReference,) + + op = proto.Field(proto.ENUM, number=2, enum=Operator,) + + value = proto.Field(proto.MESSAGE, number=3, message=gd_entity.Value,) + + +class GqlQuery(proto.Message): + r"""A `GQL + query `__. + + Attributes: + query_string (str): + A string of the format described + `here `__. + allow_literals (bool): + When false, the query string must not contain any literals + and instead must bind all values. For example, + ``SELECT * FROM Kind WHERE a = 'string literal'`` is not + allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. + named_bindings (Sequence[~.query.GqlQuery.NamedBindingsEntry]): + For each non-reserved named binding site in the query + string, there must be a named parameter with that name, but + not necessarily the inverse. + + Key must match regex ``[A-Za-z_$][A-Za-z_$0-9]*``, must not + match regex ``__.*__``, and must not be ``""``. + positional_bindings (Sequence[~.query.GqlQueryParameter]): + Numbered binding site @1 references the first numbered + parameter, effectively using 1-based indexing, rather than + the usual 0. + + For each binding site numbered i in ``query_string``, there + must be an i-th numbered parameter. The inverse must also be + true. 
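+ 
+ Example (illustrative only; the kind ``Task`` and the binding value
+ are placeholders)::
+ 
+     from google.cloud.datastore_v1.types import entity, query
+ 
+     gql = query.GqlQuery(
+         query_string="SELECT * FROM Task WHERE done = @done",
+         named_bindings={
+             "done": query.GqlQueryParameter(
+                 value=entity.Value(boolean_value=False)
+             )
+         },
+     )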
+ """ + + query_string = proto.Field(proto.STRING, number=1) + + allow_literals = proto.Field(proto.BOOL, number=2) + + named_bindings = proto.MapField( + proto.STRING, proto.MESSAGE, number=5, message="GqlQueryParameter", + ) + + positional_bindings = proto.RepeatedField( + proto.MESSAGE, number=4, message="GqlQueryParameter", + ) + + +class GqlQueryParameter(proto.Message): + r"""A binding parameter for a GQL query. + + Attributes: + value (~.gd_entity.Value): + A value parameter. + cursor (bytes): + A query cursor. Query cursors are returned in + query result batches. + """ + + value = proto.Field( + proto.MESSAGE, number=2, oneof="parameter_type", message=gd_entity.Value, + ) + + cursor = proto.Field(proto.BYTES, number=3, oneof="parameter_type") + + +class QueryResultBatch(proto.Message): + r"""A batch of results produced by a query. + + Attributes: + skipped_results (int): + The number of results skipped, typically + because of an offset. + skipped_cursor (bytes): + A cursor that points to the position after the last skipped + result. Will be set when ``skipped_results`` != 0. + entity_result_type (~.query.EntityResult.ResultType): + The result type for every entity in ``entity_results``. + entity_results (Sequence[~.query.EntityResult]): + The results for this batch. + end_cursor (bytes): + A cursor that points to the position after + the last result in the batch. + more_results (~.query.QueryResultBatch.MoreResultsType): + The state of the query after the current + batch. + snapshot_version (int): + The version number of the snapshot this batch was returned + from. This applies to the range of results from the query's + ``start_cursor`` (or the beginning of the query if no cursor + was given) to this batch's ``end_cursor`` (not the query's + ``end_cursor``). + + In a single transaction, subsequent query result batches for + the same query can have a greater snapshot version number. + Each batch's snapshot version is valid for all preceding + batches. The value will be zero for eventually consistent + queries. 
+ """ + + class MoreResultsType(proto.Enum): + r"""The possible values for the ``more_results`` field.""" + MORE_RESULTS_TYPE_UNSPECIFIED = 0 + NOT_FINISHED = 1 + MORE_RESULTS_AFTER_LIMIT = 2 + MORE_RESULTS_AFTER_CURSOR = 4 + NO_MORE_RESULTS = 3 + + skipped_results = proto.Field(proto.INT32, number=6) + + skipped_cursor = proto.Field(proto.BYTES, number=3) + + entity_result_type = proto.Field( + proto.ENUM, number=1, enum=EntityResult.ResultType, + ) + + entity_results = proto.RepeatedField(proto.MESSAGE, number=2, message=EntityResult,) + + end_cursor = proto.Field(proto.BYTES, number=4) + + more_results = proto.Field(proto.ENUM, number=5, enum=MoreResultsType,) + + snapshot_version = proto.Field(proto.INT64, number=7) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 5ee23c5c2b85..acdde3e6db53 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,7 +70,7 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") + session.install("mock", "pytest", "pytest-asyncio", "pytest-cov") session.install("-e", ".") # Run py.test against the unit tests. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py new file mode 100644 index 000000000000..12009ba0eccb --- /dev/null +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class datastore_adminCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'export_entities': ('project_id', 'output_url_prefix', 'labels', 'entity_filter', ), + 'get_index': ('project_id', 'index_id', ), + 'import_entities': ('project_id', 'input_url', 'labels', 'entity_filter', ), + 'list_indexes': ('project_id', 'filter', 'page_size', 'page_token', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=datastore_adminCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the datastore_admin client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
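+
+For example (an illustrative sketch; the argument values are placeholders), a
+positional call such as
+
+    client.export_entities('my-project', 'gs://bkt')
+
+is rewritten, per METHOD_TO_PARAMS above, to
+
+    client.export_entities(request={'project_id': 'my-project', 'output_url_prefix': 'gs://bkt'})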
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py new file mode 100644 index 000000000000..30d643d2d678 --- /dev/null +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class datastoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'allocate_ids': ('project_id', 'keys', ), + 'begin_transaction': ('project_id', 'transaction_options', ), + 'commit': ('project_id', 'mode', 'transaction', 'mutations', ), + 'lookup': ('project_id', 'keys', 'read_options', ), + 'reserve_ids': ('project_id', 'keys', 'database_id', ), + 'rollback': ('project_id', 'transaction', ), + 'run_query': ('project_id', 'partition_id', 'read_options', 'query', 'gql_query', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=datastoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the datastore client library. 
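+
+For example (an illustrative sketch; the argument values are placeholders), a
+positional call such as
+
+    client.lookup('my-project', [key])
+
+is rewritten, per METHOD_TO_PARAMS above, to
+
+    client.lookup(request={'project_id': 'my-project', 'keys': [key]})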
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 3a8b88af3b5a..2df2e821a081 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,8 +29,10 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "google-cloud-core >= 1.4.0, < 2.0dev", + "proto-plus >= 1.4.0", + "libcst >= 0.2.5", ] extras = {} @@ -51,7 +53,9 @@ # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] # Determine which namespaces are needed. 
@@ -74,21 +78,25 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Operating System :: OS Independent", "Topic :: Internet", + "Topic :: Software Development :: Libraries :: Python Modules", ], platforms="Posix; MacOS X; Windows", packages=packages, namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=[ + "scripts/fixup_datastore_v1_keywords.py", + "scripts/fixup_datastore_admin_v1_keywords.py", + ], include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 4c3400d3107f..279f34898d70 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,23 +3,15 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "8aa3eac28e0e733b61d6ab9e1d233a99467b7081" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8d73f9486fc193a150f6c907dfb9f49431aff3ff", - "internalRef": "332497859" + "remote": "git@github.com:crwilcox/python-datastore.git", + "sha": "10442b5a6aec7a352612f02368bf1257ddbfc855" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f3c04883d6c43261ff13db1f52d03a283be06871" + "sha": "487eba79f8260e34205d8ceb1ebcc65685085e19" } } ], @@ -42,93 +34,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", 
- ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "google/cloud/datastore_admin_v1/proto/__init__.py", - "google/cloud/datastore_admin_v1/proto/datastore_admin.proto", - "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py", - "google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py", - "google/cloud/datastore_admin_v1/proto/index.proto", - "google/cloud/datastore_admin_v1/proto/index_pb2.py", - "google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py", - "google/cloud/datastore_v1/gapic/__init__.py", - "google/cloud/datastore_v1/gapic/datastore_client.py", - "google/cloud/datastore_v1/gapic/datastore_client_config.py", - "google/cloud/datastore_v1/gapic/enums.py", - "google/cloud/datastore_v1/gapic/transports/__init__.py", - "google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py", - "google/cloud/datastore_v1/proto/__init__.py", - "google/cloud/datastore_v1/proto/datastore.proto", - "google/cloud/datastore_v1/proto/datastore_pb2.py", - "google/cloud/datastore_v1/proto/datastore_pb2_grpc.py", - "google/cloud/datastore_v1/proto/entity.proto", - "google/cloud/datastore_v1/proto/entity_pb2.py", - "google/cloud/datastore_v1/proto/entity_pb2_grpc.py", - "google/cloud/datastore_v1/proto/query.proto", - "google/cloud/datastore_v1/proto/query_pb2.py", - "google/cloud/datastore_v1/proto/query_pb2_grpc.py", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore" ] } \ No newline at end of file diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index af705849487f..251fcc865b14 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -29,8 +29,13 @@ include_protos=True, ) -s.move(library / "google/cloud/datastore_v1/proto") -s.move(library / "google/cloud/datastore_v1/gapic") +s.move(library / "google/cloud/datastore_v1") + +s.move( + library / f"tests/", + f"tests", +) +s.move(library / "scripts") # ---------------------------------------------------------------------------- # Generate datastore admin GAPIC layer @@ -43,12 +48,17 @@ ) s.move( - library / "datastore-admin-v1-py/google/cloud/datastore_admin_v1", + library / "google/cloud/datastore_admin_v1", "google/cloud/datastore_admin_v1" ) -s.move(library / "google/cloud/datastore_admin_v1/proto") +s.move( + library / f"tests/", + f"tests", +) + +s.move(library / "scripts") s.replace( "google/**/datastore_admin_client.py", "google-cloud-datastore-admin", @@ -87,14 +97,14 @@ ):""" ) -if num != 1: - raise Exception("Required replacement not made.") +#if num != 1: +# raise Exception("Required replacement not made.") # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=99) -s.move(templated_files, excludes=["docs/multiprocessing.rst"]) +s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) s.replace("noxfile.py", """["']sphinx['"]""", 
'''"sphinx<3.0.0"''') diff --git a/packages/google-cloud-datastore/tests/doctests.py b/packages/google-cloud-datastore/tests/doctests.py index 329b3d416d3b..cc8d6a3a6378 100644 --- a/packages/google-cloud-datastore/tests/doctests.py +++ b/packages/google-cloud-datastore/tests/doctests.py @@ -17,8 +17,6 @@ import tempfile import unittest -import six - from google.cloud import datastore @@ -39,7 +37,6 @@ """ -@unittest.skipIf(six.PY2, "Doctests run against Python 3 only.") class TestDoctest(unittest.TestCase): def _submodules(self): pkg_iter = pkgutil.iter_modules(datastore.__path__) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py new file mode 100644 index 000000000000..1efc1dcae7ca --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -0,0 +1,1425 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async +from google.api_core import operations_v1 +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datastore_admin_v1.services.datastore_admin import ( + DatastoreAdminAsyncClient, +) +from google.cloud.datastore_admin_v1.services.datastore_admin import ( + DatastoreAdminClient, +) +from google.cloud.datastore_admin_v1.services.datastore_admin import pagers +from google.cloud.datastore_admin_v1.services.datastore_admin import transports +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 +from google.oauth2 import service_account + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
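+# For example (illustrative): the real "datastore.googleapis.com" default is
+# returned unchanged, while a localhost (emulator-style) default is replaced
+# by "foo.googleapis.com" so that a distinct "foo.mtls.googleapis.com"
+# endpoint can be derived by the tests below.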
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DatastoreAdminClient._get_default_mtls_endpoint(None) is None + assert ( + DatastoreAdminClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DatastoreAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DatastoreAdminClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DatastoreAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DatastoreAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient] +) +def test_datastore_admin_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "datastore.googleapis.com:443" + + +def test_datastore_admin_client_get_transport_class(): + transport = DatastoreAdminClient.get_transport_class() + assert transport == transports.DatastoreAdminGrpcTransport + + transport = DatastoreAdminClient.get_transport_class("grpc") + assert transport == transports.DatastoreAdminGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DatastoreAdminClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminAsyncClient), +) +def test_datastore_admin_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DatastoreAdminClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DatastoreAdminClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc", "true"), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc", "false"), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DatastoreAdminClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_datastore_admin_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
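+    # (Illustrative summary: with no client certificate available from either
+    # source, the client is expected to fall back to the regular endpoint and
+    # ssl_channel_credentials=None, regardless of the
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.)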
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_datastore_admin_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_datastore_admin_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_datastore_admin_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DatastoreAdminClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_export_entities( + transport: str = "grpc", request_type=datastore_admin.ExportEntitiesRequest +): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
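+    # (Illustrative note: request_type is parametrized so that
+    # test_export_entities_from_dict below can rerun this same body with
+    # request_type=dict.)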
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.export_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore_admin.ExportEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_entities_from_dict(): + test_export_entities(request_type=dict) + + +@pytest.mark.asyncio +async def test_export_entities_async(transport: str = "grpc_asyncio"): + client = DatastoreAdminAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore_admin.ExportEntitiesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.export_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_entities_flattened(): + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.export_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_entities( + project_id="project_id_value", + labels={"key_value": "value_value"}, + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + output_url_prefix="output_url_prefix_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].labels == {"key_value": "value_value"} + + assert args[0].entity_filter == datastore_admin.EntityFilter( + kinds=["kinds_value"] + ) + + assert args[0].output_url_prefix == "output_url_prefix_value" + + +def test_export_entities_flattened_error(): + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
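+    # (Illustrative: the positional ExportEntitiesRequest below is the
+    # request-object call style, while project_id/labels/entity_filter/
+    # output_url_prefix are flattened fields; mixing the two styles must
+    # raise ValueError.)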
+ with pytest.raises(ValueError): + client.export_entities( + datastore_admin.ExportEntitiesRequest(), + project_id="project_id_value", + labels={"key_value": "value_value"}, + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + output_url_prefix="output_url_prefix_value", + ) + + +@pytest.mark.asyncio +async def test_export_entities_flattened_async(): + client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.export_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.export_entities( + project_id="project_id_value", + labels={"key_value": "value_value"}, + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + output_url_prefix="output_url_prefix_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].labels == {"key_value": "value_value"} + + assert args[0].entity_filter == datastore_admin.EntityFilter( + kinds=["kinds_value"] + ) + + assert args[0].output_url_prefix == "output_url_prefix_value" + + +@pytest.mark.asyncio +async def test_export_entities_flattened_error_async(): + client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_entities( + datastore_admin.ExportEntitiesRequest(), + project_id="project_id_value", + labels={"key_value": "value_value"}, + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + output_url_prefix="output_url_prefix_value", + ) + + +def test_import_entities( + transport: str = "grpc", request_type=datastore_admin.ImportEntitiesRequest +): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.import_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore_admin.ImportEntitiesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_import_entities_from_dict(): + test_import_entities(request_type=dict) + + +@pytest.mark.asyncio +async def test_import_entities_async(transport: str = "grpc_asyncio"): + client = DatastoreAdminAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore_admin.ImportEntitiesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.import_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_entities_flattened(): + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.import_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_entities( + project_id="project_id_value", + labels={"key_value": "value_value"}, + input_url="input_url_value", + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].labels == {"key_value": "value_value"} + + assert args[0].input_url == "input_url_value" + + assert args[0].entity_filter == datastore_admin.EntityFilter( + kinds=["kinds_value"] + ) + + +def test_import_entities_flattened_error(): + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_entities( + datastore_admin.ImportEntitiesRequest(), + project_id="project_id_value", + labels={"key_value": "value_value"}, + input_url="input_url_value", + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + ) + + +@pytest.mark.asyncio +async def test_import_entities_flattened_async(): + client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.import_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.import_entities( + project_id="project_id_value", + labels={"key_value": "value_value"}, + input_url="input_url_value", + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].labels == {"key_value": "value_value"} + + assert args[0].input_url == "input_url_value" + + assert args[0].entity_filter == datastore_admin.EntityFilter( + kinds=["kinds_value"] + ) + + +@pytest.mark.asyncio +async def test_import_entities_flattened_error_async(): + client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_entities( + datastore_admin.ImportEntitiesRequest(), + project_id="project_id_value", + labels={"key_value": "value_value"}, + input_url="input_url_value", + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + ) + + +def test_get_index( + transport: str = "grpc", request_type=datastore_admin.GetIndexRequest +): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + + response = client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore_admin.GetIndexRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + + assert response.project_id == "project_id_value" + + assert response.index_id == "index_id_value" + + assert response.kind == "kind_value" + + assert response.ancestor == index.Index.AncestorMode.NONE + + assert response.state == index.Index.State.CREATING + + +def test_get_index_from_dict(): + test_get_index(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_index_async(transport: str = "grpc_asyncio"): + client = DatastoreAdminAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore_admin.GetIndexRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_index), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + ) + + response = await client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + + assert response.project_id == "project_id_value" + + assert response.index_id == "index_id_value" + + assert response.kind == "kind_value" + + assert response.ancestor == index.Index.AncestorMode.NONE + + assert response.state == index.Index.State.CREATING + + +def test_list_indexes( + transport: str = "grpc", request_type=datastore_admin.ListIndexesRequest +): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore_admin.ListIndexesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIndexesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_indexes_from_dict(): + test_list_indexes(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_indexes_async(transport: str = "grpc_asyncio"): + client = DatastoreAdminAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore_admin.ListIndexesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_indexes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIndexesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_indexes_pager(): + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: + # Set the response to a series of pages. 
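+        # (Illustrative: the four fake pages below carry 3, 0, 1 and 2
+        # Index messages, so iterating the pager should yield 6 results in
+        # total; the trailing RuntimeError fails the test if an extra page
+        # is requested.)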
+ call.side_effect = ( + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(), index.Index(),], + next_page_token="abc", + ), + datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(),], next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(),], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_indexes(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, index.Index) for i in results) + + +def test_list_indexes_pages(): + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(), index.Index(),], + next_page_token="abc", + ), + datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(),], next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(),], + ), + RuntimeError, + ) + pages = list(client.list_indexes(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_indexes_async_pager(): + client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_indexes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(), index.Index(),], + next_page_token="abc", + ), + datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(),], next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(),], + ), + RuntimeError, + ) + async_pager = await client.list_indexes(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, index.Index) for i in responses) + + +@pytest.mark.asyncio +async def test_list_indexes_async_pages(): + client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_indexes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(), index.Index(),], + next_page_token="abc", + ), + datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(),], next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[index.Index(), index.Index(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_indexes(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = DatastoreAdminClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastoreAdminGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.DatastoreAdminGrpcTransport,) + + +def test_datastore_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.DatastoreAdminTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_datastore_admin_base_transport(): + # Instantiate the base transport. 
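+    # Patching __init__ to return None lets the abstract transport be
+    # constructed without credentials; every RPC method (and the LRO
+    # operations_client property) should then raise NotImplementedError.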
+ with mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DatastoreAdminTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "export_entities", + "import_entities", + "get_index", + "list_indexes", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_datastore_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DatastoreAdminTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +def test_datastore_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DatastoreAdminTransport() + adc.assert_called_once() + + +def test_datastore_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + DatastoreAdminClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id=None, + ) + + +def test_datastore_admin_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
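+    # Patching google.auth.default avoids a real ADC lookup; the assertion
+    # below verifies it is queried with the Datastore OAuth scopes and the
+    # caller-supplied quota project.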
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.DatastoreAdminGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +def test_datastore_admin_host_no_port(): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datastore.googleapis.com" + ), + ) + assert client._transport._host == "datastore.googleapis.com:443" + + +def test_datastore_admin_host_with_port(): + client = DatastoreAdminClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datastore.googleapis.com:8000" + ), + ) + assert client._transport._host == "datastore.googleapis.com:8000" + + +def test_datastore_admin_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.DatastoreAdminGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +def test_datastore_admin_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.DatastoreAdminGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + 
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-platform",
+                    "https://www.googleapis.com/auth/datastore",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_datastore_admin_grpc_lro_client():
+    client = DatastoreAdminClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+    transport = client._transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_datastore_admin_grpc_lro_async_client():
+    client = DatastoreAdminAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+    )
+    transport = client._client._transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.DatastoreAdminTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = DatastoreAdminClient(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.DatastoreAdminTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = DatastoreAdminClient.get_transport_class()
+        transport = transport_class(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py
new file mode 100644
index 000000000000..8b137891791f
--- /dev/null
+++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py
new file mode 100644
index 000000000000..e5201c3ad90f
--- /dev/null
+++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py
@@ -0,0 +1,1817 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient +from google.cloud.datastore_v1.services.datastore import DatastoreClient +from google.cloud.datastore_v1.services.datastore import transports +from google.cloud.datastore_v1.types import datastore +from google.cloud.datastore_v1.types import entity +from google.cloud.datastore_v1.types import query +from google.oauth2 import service_account +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import wrappers_pb2 as wrappers # type: ignore +from google.type import latlng_pb2 as latlng # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
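+# For example, a DEFAULT_ENDPOINT of "localhost:8080" maps to
+# "foo.googleapis.com", while any other endpoint passes through unchanged.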
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DatastoreClient._get_default_mtls_endpoint(None) is None + assert DatastoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + DatastoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DatastoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DatastoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient]) +def test_datastore_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "datastore.googleapis.com:443" + + +def test_datastore_client_get_transport_class(): + transport = DatastoreClient.get_transport_class() + assert transport == transports.DatastoreGrpcTransport + + transport = DatastoreClient.get_transport_class("grpc") + assert transport == transports.DatastoreGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) +) +@mock.patch.object( + DatastoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAsyncClient), +) +def test_datastore_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DatastoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DatastoreClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
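+    # The transport __init__ is stubbed to capture the keyword arguments
+    # the client forwards; a custom api_endpoint should arrive as the
+    # transport host.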
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", "true"), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", "false"), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) +) +@mock.patch.object( + DatastoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_datastore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
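+    # With no certificate available from either source, the client should
+    # fall back to the plain endpoint and pass ssl_channel_credentials=None,
+    # whatever the GOOGLE_API_USE_CLIENT_CERTIFICATE value.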
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_datastore_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_datastore_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_datastore_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.DatastoreGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DatastoreClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
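+    # Patching __call__ on the stub method's type intercepts the unary RPC
+    # at the gRPC boundary, so the test exercises the client plumbing
+    # without any network traffic.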
+ with mock.patch.object(type(client._transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.LookupResponse() + + response = client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.LookupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.LookupResponse) + + +def test_lookup_from_dict(): + test_lookup(request_type=dict) + + +@pytest.mark.asyncio +async def test_lookup_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.LookupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse() + ) + + response = await client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.LookupResponse) + + +def test_lookup_flattened(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.LookupResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.lookup( + project_id="project_id_value", + read_options=datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ), + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].read_options == datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ) + + assert args[0].keys == [ + entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) + ] + + +def test_lookup_flattened_error(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lookup( + datastore.LookupRequest(), + project_id="project_id_value", + read_options=datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ), + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + +@pytest.mark.asyncio +async def test_lookup_flattened_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
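+    # The async variant wraps the response in FakeUnaryUnaryCall so that
+    # awaiting the mocked RPC yields a LookupResponse.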
+ with mock.patch.object(type(client._client._transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.LookupResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.lookup( + project_id="project_id_value", + read_options=datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ), + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].read_options == datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ) + + assert args[0].keys == [ + entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) + ] + + +@pytest.mark.asyncio +async def test_lookup_flattened_error_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.lookup( + datastore.LookupRequest(), + project_id="project_id_value", + read_options=datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ), + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + +def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryRequest): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.RunQueryResponse() + + response = client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.RunQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunQueryResponse) + + +def test_run_query_from_dict(): + test_run_query(request_type=dict) + + +@pytest.mark.asyncio +async def test_run_query_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.RunQueryRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse() + ) + + response = await client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunQueryResponse) + + +def test_begin_transaction( + transport: str = "grpc", request_type=datastore.BeginTransactionRequest +): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + + response = client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.BeginTransactionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.BeginTransactionResponse) + + assert response.transaction == b"transaction_blob" + + +def test_begin_transaction_from_dict(): + test_begin_transaction(request_type=dict) + + +@pytest.mark.asyncio +async def test_begin_transaction_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.BeginTransactionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse(transaction=b"transaction_blob",) + ) + + response = await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.BeginTransactionResponse) + + assert response.transaction == b"transaction_blob" + + +def test_begin_transaction_flattened(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.BeginTransactionResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.begin_transaction(project_id="project_id_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
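+    # Flattened keyword arguments are folded into a single request proto;
+    # args[0] is that request, so its fields can be asserted individually.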
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + +def test_begin_transaction_flattened_error(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.begin_transaction( + datastore.BeginTransactionRequest(), project_id="project_id_value", + ) + + +@pytest.mark.asyncio +async def test_begin_transaction_flattened_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.BeginTransactionResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.begin_transaction(project_id="project_id_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + +@pytest.mark.asyncio +async def test_begin_transaction_flattened_error_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.begin_transaction( + datastore.BeginTransactionRequest(), project_id="project_id_value", + ) + + +def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.CommitResponse(index_updates=1389,) + + response = client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.CommitRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.CommitResponse) + + assert response.index_updates == 1389 + + +def test_commit_from_dict(): + test_commit(request_type=dict) + + +@pytest.mark.asyncio +async def test_commit_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.CommitRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse(index_updates=1389,) + ) + + response = await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.CommitResponse) + + assert response.index_updates == 1389 + + +def test_commit_flattened(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.CommitResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.commit( + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + transaction=b"transaction_blob", + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) + ) + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL + + assert args[0].mutations == [ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ) + ) + ] + + assert args[0].transaction == b"transaction_blob" + + +def test_commit_flattened_error(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + datastore.CommitRequest(), + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + transaction=b"transaction_blob", + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) + ) + ) + ], + ) + + +@pytest.mark.asyncio +async def test_commit_flattened_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.CommitResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.commit( + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + transaction=b"transaction_blob", + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) + ) + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL + + assert args[0].mutations == [ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ) + ) + ] + + assert args[0].transaction == b"transaction_blob" + + +@pytest.mark.asyncio +async def test_commit_flattened_error_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.commit( + datastore.CommitRequest(), + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + transaction=b"transaction_blob", + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) + ) + ) + ], + ) + + +def test_rollback(transport: str = "grpc", request_type=datastore.RollbackRequest): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.RollbackResponse() + + response = client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.RollbackRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RollbackResponse) + + +def test_rollback_from_dict(): + test_rollback(request_type=dict) + + +@pytest.mark.asyncio +async def test_rollback_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.RollbackRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.rollback), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + + response = await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RollbackResponse) + + +def test_rollback_flattened(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = datastore.RollbackResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rollback( + project_id="project_id_value", transaction=b"transaction_blob", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].transaction == b"transaction_blob" + + +def test_rollback_flattened_error(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + datastore.RollbackRequest(), + project_id="project_id_value", + transaction=b"transaction_blob", + ) + + +@pytest.mark.asyncio +async def test_rollback_flattened_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.rollback), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.RollbackResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rollback( + project_id="project_id_value", transaction=b"transaction_blob", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].transaction == b"transaction_blob" + + +@pytest.mark.asyncio +async def test_rollback_flattened_error_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rollback( + datastore.RollbackRequest(), + project_id="project_id_value", + transaction=b"transaction_blob", + ) + + +def test_allocate_ids( + transport: str = "grpc", request_type=datastore.AllocateIdsRequest +): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.AllocateIdsResponse() + + response = client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.AllocateIdsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastore.AllocateIdsResponse) + + +def test_allocate_ids_from_dict(): + test_allocate_ids(request_type=dict) + + +@pytest.mark.asyncio +async def test_allocate_ids_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.AllocateIdsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.allocate_ids), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + + response = await client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.AllocateIdsResponse) + + +def test_allocate_ids_flattened(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.AllocateIdsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.allocate_ids( + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].keys == [ + entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) + ] + + +def test_allocate_ids_flattened_error(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.allocate_ids( + datastore.AllocateIdsRequest(), + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + +@pytest.mark.asyncio +async def test_allocate_ids_flattened_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.allocate_ids), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.AllocateIdsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.allocate_ids( + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project_id == "project_id_value" + + assert args[0].keys == [ + entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) + ] + + +@pytest.mark.asyncio +async def test_allocate_ids_flattened_error_async(): + client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.allocate_ids( + datastore.AllocateIdsRequest(), + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + +def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsRequest): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.ReserveIdsResponse() + + response = client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datastore.ReserveIdsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.ReserveIdsResponse) + + +def test_reserve_ids_from_dict(): + test_reserve_ids(request_type=dict) + + +@pytest.mark.asyncio +async def test_reserve_ids_async(transport: str = "grpc_asyncio"): + client = DatastoreAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datastore.ReserveIdsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.reserve_ids), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + + response = await client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.ReserveIdsResponse) + + +def test_reserve_ids_flattened(): + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.ReserveIdsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.reserve_ids(
+            project_id="project_id_value",
+            keys=[
+                entity.Key(
+                    partition_id=entity.PartitionId(project_id="project_id_value")
+                )
+            ],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].project_id == "project_id_value"
+
+        assert args[0].keys == [
+            entity.Key(partition_id=entity.PartitionId(project_id="project_id_value"))
+        ]
+
+
+def test_reserve_ids_flattened_error():
+    client = DatastoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.reserve_ids(
+            datastore.ReserveIdsRequest(),
+            project_id="project_id_value",
+            keys=[
+                entity.Key(
+                    partition_id=entity.PartitionId(project_id="project_id_value")
+                )
+            ],
+        )
+
+
+@pytest.mark.asyncio
+async def test_reserve_ids_flattened_async():
+    client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.reserve_ids), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datastore.ReserveIdsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.reserve_ids(
+            project_id="project_id_value",
+            keys=[
+                entity.Key(
+                    partition_id=entity.PartitionId(project_id="project_id_value")
+                )
+            ],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].project_id == "project_id_value"
+
+        assert args[0].keys == [
+            entity.Key(partition_id=entity.PartitionId(project_id="project_id_value"))
+        ]
+
+
+@pytest.mark.asyncio
+async def test_reserve_ids_flattened_error_async():
+    client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.reserve_ids(
+            datastore.ReserveIdsRequest(),
+            project_id="project_id_value",
+            keys=[
+                entity.Key(
+                    partition_id=entity.PartitionId(project_id="project_id_value")
+                )
+            ],
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.DatastoreGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DatastoreClient(
+            credentials=credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.DatastoreGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DatastoreClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
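+    # (A transport instance already encapsulates its own credentials and
+    # scopes, so combining one with scope-bearing client_options is rejected.)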
+ transport = transports.DatastoreGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = DatastoreClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastoreGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.DatastoreGrpcTransport,) + + +def test_datastore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.DatastoreTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_datastore_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DatastoreTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "lookup", + "run_query", + "begin_transaction", + "commit", + "rollback", + "allocate_ids", + "reserve_ids", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_datastore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DatastoreTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +def test_datastore_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
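+    # google.auth.default is patched below, so no real ADC lookup happens.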
+ with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DatastoreTransport() + adc.assert_called_once() + + +def test_datastore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + DatastoreClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id=None, + ) + + +def test_datastore_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.DatastoreGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +def test_datastore_host_no_port(): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datastore.googleapis.com" + ), + ) + assert client._transport._host == "datastore.googleapis.com:443" + + +def test_datastore_host_with_port(): + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datastore.googleapis.com:8000" + ), + ) + assert client._transport._host == "datastore.googleapis.com:8000" + + +def test_datastore_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.DatastoreGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +def test_datastore_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. 
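+    # (Mirrors the sync test above: an explicitly supplied aio channel is
+    # used as-is, while _host still has the default port appended.)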
+ transport = transports.DatastoreGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +@pytest.mark.parametrize( + "transport_class", + [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], +) +def test_datastore_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "transport_class", + [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], +) +def test_datastore_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DatastoreTransport, "_prep_wrapped_messages" + ) as prep: + client = DatastoreClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DatastoreTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DatastoreClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git 
a/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py b/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py deleted file mode 100644 index 7dfb27ed129e..000000000000 --- a/packages/google-cloud-datastore/tests/unit/gapic/v1/test_datastore_client_v1.py +++ /dev/null @@ -1,302 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Unit tests.""" - -import pytest - -from google.cloud import datastore_v1 -from google.cloud.datastore_v1 import enums -from google.cloud.datastore_v1.proto import datastore_pb2 -from google.cloud.datastore_v1.proto import entity_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestDatastoreClient(object): - def test_lookup(self): - # Setup Expected Response - expected_response = {} - expected_response = datastore_pb2.LookupResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - keys = [] - - response = client.lookup(project_id, keys) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datastore_pb2.LookupRequest(project_id=project_id, keys=keys) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_lookup_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - keys = [] - - with pytest.raises(CustomException): - client.lookup(project_id, keys) - - def test_run_query(self): - # Setup Expected Response - expected_response = {} - expected_response = datastore_pb2.RunQueryResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - partition_id = {} - - response = client.run_query(project_id, partition_id) - assert expected_response == response - - assert len(channel.requests) 
== 1 - expected_request = datastore_pb2.RunQueryRequest( - project_id=project_id, partition_id=partition_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_run_query_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - partition_id = {} - - with pytest.raises(CustomException): - client.run_query(project_id, partition_id) - - def test_begin_transaction(self): - # Setup Expected Response - transaction = b"-34" - expected_response = {"transaction": transaction} - expected_response = datastore_pb2.BeginTransactionResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - - response = client.begin_transaction(project_id) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datastore_pb2.BeginTransactionRequest(project_id=project_id) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_begin_transaction_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - - with pytest.raises(CustomException): - client.begin_transaction(project_id) - - def test_commit(self): - # Setup Expected Response - index_updates = 1425228195 - expected_response = {"index_updates": index_updates} - expected_response = datastore_pb2.CommitResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED - mutations = [] - - response = client.commit(project_id, mode, mutations) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datastore_pb2.CommitRequest( - project_id=project_id, mode=mode, mutations=mutations - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_commit_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED - mutations = [] - - with pytest.raises(CustomException): - client.commit(project_id, mode, mutations) - - def test_rollback(self): - # Setup Expected Response - expected_response = {} - expected_response = datastore_pb2.RollbackResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - transaction = b"-34" - - response = client.rollback(project_id, transaction) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_rollback_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - transaction = b"-34" - - with pytest.raises(CustomException): - client.rollback(project_id, transaction) - - def test_allocate_ids(self): - # Setup Expected Response - expected_response = {} - expected_response = datastore_pb2.AllocateIdsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - keys = [] - - response = client.allocate_ids(project_id, keys) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datastore_pb2.AllocateIdsRequest( - project_id=project_id, keys=keys - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_allocate_ids_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - keys = [] - - with pytest.raises(CustomException): - client.allocate_ids(project_id, keys) - - def test_reserve_ids(self): - # Setup Expected Response - expected_response = {} - expected_response = datastore_pb2.ReserveIdsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup Request - project_id = "projectId-1969970175" - keys = [] - - response = client.reserve_ids(project_id, keys) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, keys=keys - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_reserve_ids_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - client = datastore_v1.DatastoreClient(channel=channel) - - # Setup request - project_id = "projectId-1969970175" - keys = [] - - with pytest.raises(CustomException): - client.reserve_ids(project_id, keys) diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py index c404dc79c109..4543dba9d07f 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gapic.py +++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py @@ -27,14 +27,18 @@ def _call_fut(self, client): return make_datastore_api(client) @mock.patch( - "google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient", + "google.cloud.datastore_v1.services.datastore.client.DatastoreClient", return_value=mock.sentinel.ds_client, ) + @mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport", + return_value=mock.sentinel.transport, + ) @mock.patch( "google.cloud.datastore._gapic.make_secure_channel", return_value=mock.sentinel.channel, ) - def test_live_api(self, make_chan, mock_klass): + def test_live_api(self, make_chan, mock_transport, mock_klass): from google.cloud._http import DEFAULT_USER_AGENT base_url = "https://datastore.googleapis.com:443" @@ -47,24 +51,31 @@ def test_live_api(self, make_chan, mock_klass): ds_api = self._call_fut(client) self.assertIs(ds_api, mock.sentinel.ds_client) + mock_transport.assert_called_once_with(channel=mock.sentinel.channel) 
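+        # The channel is now wrapped in a transport object rather than being
+        # passed to the client constructor directly.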
+ make_chan.assert_called_once_with( mock.sentinel.credentials, DEFAULT_USER_AGENT, "datastore.googleapis.com:443", ) + mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, client_info=mock.sentinel.client_info + transport=mock.sentinel.transport, client_info=mock.sentinel.client_info ) @mock.patch( - "google.cloud.datastore_v1.gapic.datastore_client.DatastoreClient", + "google.cloud.datastore_v1.services.datastore.client.DatastoreClient", return_value=mock.sentinel.ds_client, ) + @mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport", + return_value=mock.sentinel.transport, + ) @mock.patch( "google.cloud.datastore._gapic.insecure_channel", return_value=mock.sentinel.channel, ) - def test_emulator(self, make_chan, mock_klass): + def test_emulator(self, make_chan, mock_transport, mock_klass): host = "localhost:8901" base_url = "http://" + host @@ -77,7 +88,10 @@ def test_emulator(self, make_chan, mock_klass): ds_api = self._call_fut(client) self.assertIs(ds_api, mock.sentinel.ds_client) + mock_transport.assert_called_once_with(channel=mock.sentinel.channel) + make_chan.assert_called_once_with(host) + mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, client_info=mock.sentinel.client_info + transport=mock.sentinel.transport, client_info=mock.sentinel.client_info ) diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index b332c946d40a..700429ff0179 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -90,7 +90,7 @@ def _call_fut(*args, **kwargs): return _rpc(*args, **kwargs) def test_it(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 http = object() project = "projectOK" @@ -102,7 +102,7 @@ def test_it(self): response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc") patch = mock.patch( "google.cloud.datastore._http._request", - return_value=response_pb.SerializeToString(), + return_value=response_pb._pb.SerializeToString(), ) with patch as mock_request: result = self._call_fut( @@ -114,13 +114,13 @@ def test_it(self): request_pb, datastore_pb2.BeginTransactionResponse, ) - self.assertEqual(result, response_pb) + self.assertEqual(result, response_pb._pb) mock_request.assert_called_once_with( http, project, method, - request_pb.SerializeToString(), + request_pb._pb.SerializeToString(), base_url, client_info, ) @@ -138,7 +138,7 @@ def _make_one(self, *args, **kwargs): @staticmethod def _make_query_pb(kind): - from google.cloud.datastore_v1.proto import query_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 return query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)]) @@ -148,7 +148,7 @@ def test_constructor(self): self.assertIs(ds_api.client, client) def test_lookup_single_key_empty_response(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" key_pb = _make_key_pb(project) @@ -157,7 +157,7 @@ def test_lookup_single_key_empty_response(self): # Create mock HTTP and client with response. 
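+        # proto-plus messages wrap the raw protobuf, reachable via ._pb; the
+        # HTTP layer serializes, parses, and compares raw protobufs, hence
+        # the ._pb accessors introduced throughout this file.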
http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -172,29 +172,29 @@ def test_lookup_single_key_empty_response(self): response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_lookup_single_key_empty_response_w_eventual(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() read_options = datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.EVENTUAL + read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL ) # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -209,18 +209,18 @@ def test_lookup_single_key_empty_response_w_eventual(self): response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_lookup_single_key_empty_response_w_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" transaction = b"TRANSACTION" @@ -230,7 +230,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -245,31 +245,31 @@ def test_lookup_single_key_empty_response_w_transaction(self): response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_lookup_single_key_nonempty_response(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 project = "PROJECT" key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() entity = entity_pb2.Entity() - entity.key.CopyFrom(key_pb) - rsp_pb.found.add(entity=entity) + entity.key._pb.CopyFrom(key_pb._pb) + rsp_pb._pb.found.add(entity=entity._pb) read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -284,7 +284,7 @@ def test_lookup_single_key_nonempty_response(self): response = ds_api.lookup(project, [key_pb], read_options=read_options) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 1) self.assertEqual(len(response.missing), 0) @@ -294,11 +294,11 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(found.key.path[0].id, 1234) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_lookup_multiple_keys_empty_response(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" key_pb1 = _make_key_pb(project) @@ -308,7 +308,7 @@ def test_lookup_multiple_keys_empty_response(self): # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -323,32 +323,32 @@ def test_lookup_multiple_keys_empty_response(self): response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options) # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) self.assertEqual(len(response.deferred), 0) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb1, key_pb2]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_lookup_multiple_keys_w_missing(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" key_pb1 = _make_key_pb(project) key_pb2 = _make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() - er_1 = rsp_pb.missing.add() - er_1.entity.key.CopyFrom(key_pb1) - er_2 = rsp_pb.missing.add() - er_2.entity.key.CopyFrom(key_pb2) + er_1 = rsp_pb._pb.missing.add() + er_1.entity.key.CopyFrom(key_pb1._pb) + er_2 = rsp_pb._pb.missing.add() + er_2.entity.key.CopyFrom(key_pb2._pb) read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -363,31 +363,31 @@ def test_lookup_multiple_keys_w_missing(self): response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 0) self.assertEqual(len(response.deferred), 0) missing_keys = [result.entity.key for result in response.missing] - self.assertEqual(missing_keys, [key_pb1, key_pb2]) + self.assertEqual(missing_keys, [key_pb1._pb, key_pb2._pb]) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb1, key_pb2]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_lookup_multiple_keys_w_deferred(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" key_pb1 = _make_key_pb(project) key_pb2 = _make_key_pb(project, id_=2345) rsp_pb = datastore_pb2.LookupResponse() - rsp_pb.deferred.add().CopyFrom(key_pb1) - rsp_pb.deferred.add().CopyFrom(key_pb2) + rsp_pb._pb.deferred.add().CopyFrom(key_pb1._pb) + rsp_pb._pb.deferred.add().CopyFrom(key_pb2._pb) read_options = datastore_pb2.ReadOptions() # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -402,20 +402,20 @@ def test_lookup_multiple_keys_w_deferred(self): response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options) # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") self.assertEqual(len(response.found), 0) self.assertEqual(len(response.missing), 0) - self.assertEqual(list(response.deferred), [key_pb1, key_pb2]) + self.assertEqual(list(response.deferred), [key_pb1._pb, key_pb2._pb]) request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb1, key_pb2]) - self.assertEqual(request.read_options, read_options) + self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) + self.assertEqual(request.read_options, read_options._pb) def test_run_query_w_eventual_no_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - from google.cloud.datastore_v1.proto import entity_pb2 - from google.cloud.datastore_v1.proto import query_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 project = "PROJECT" kind = "Nonesuch" @@ -423,19 +423,19 @@ def test_run_query_w_eventual_no_transaction(self): query_pb = self._make_query_pb(kind) partition_id = entity_pb2.PartitionId(project_id=project) read_options = datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.EVENTUAL + read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL ) rsp_pb = datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.FULL, + entity_result_type=query_pb2.EntityResult.ResultType.FULL, end_cursor=cursor, - more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS, + more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, ) ) # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -450,18 +450,18 @@ def test_run_query_w_eventual_no_transaction(self): response = ds_api.run_query(project, partition_id, read_options, query=query_pb) # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "runQuery") request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id) - self.assertEqual(request.query, query_pb) - self.assertEqual(request.read_options, read_options) + self.assertEqual(request.partition_id, partition_id._pb) + self.assertEqual(request.query, query_pb._pb) + self.assertEqual(request.read_options, read_options._pb) def test_run_query_wo_eventual_w_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - from google.cloud.datastore_v1.proto import entity_pb2 - from google.cloud.datastore_v1.proto import query_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 project = "PROJECT" kind = "Nonesuch" @@ -472,15 +472,15 @@ def test_run_query_wo_eventual_w_transaction(self): read_options = datastore_pb2.ReadOptions(transaction=transaction) rsp_pb = datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.FULL, + entity_result_type=query_pb2.EntityResult.ResultType.FULL, end_cursor=cursor, - more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS, + more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, ) ) # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -495,18 +495,18 @@ def test_run_query_wo_eventual_w_transaction(self): response = ds_api.run_query(project, partition_id, read_options, query=query_pb) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "runQuery") request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id) - self.assertEqual(request.query, query_pb) - self.assertEqual(request.read_options, read_options) + self.assertEqual(request.partition_id, partition_id._pb) + self.assertEqual(request.query, query_pb._pb) + self.assertEqual(request.read_options, read_options._pb) def test_run_query_wo_namespace_empty_result(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - from google.cloud.datastore_v1.proto import entity_pb2 - from google.cloud.datastore_v1.proto import query_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 project = "PROJECT" kind = "Nonesuch" @@ -516,15 +516,15 @@ def test_run_query_wo_namespace_empty_result(self): read_options = datastore_pb2.ReadOptions() rsp_pb = datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.FULL, + entity_result_type=query_pb2.EntityResult.ResultType.FULL, end_cursor=cursor, - more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS, + more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, ) ) # Create mock HTTP and client with response. 
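+        # Enum values such as ResultType.FULL and
+        # MoreResultsType.NO_MORE_RESULTS are now referenced through their
+        # nested enum types in the proto-plus generated classes.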
http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -539,18 +539,18 @@ def test_run_query_wo_namespace_empty_result(self): response = ds_api.run_query(project, partition_id, read_options, query=query_pb) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "runQuery") request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id) - self.assertEqual(request.query, query_pb) - self.assertEqual(request.read_options, read_options) + self.assertEqual(request.partition_id, partition_id._pb) + self.assertEqual(request.query, query_pb._pb) + self.assertEqual(request.read_options, read_options._pb) def test_run_query_w_namespace_nonempty_result(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - from google.cloud.datastore_v1.proto import entity_pb2 - from google.cloud.datastore_v1.proto import query_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 project = "PROJECT" kind = "Kind" @@ -562,15 +562,15 @@ def test_run_query_w_namespace_nonempty_result(self): read_options = datastore_pb2.ReadOptions() rsp_pb = datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.FULL, + entity_result_type=query_pb2.EntityResult.ResultType.FULL, entity_results=[query_pb2.EntityResult(entity=entity_pb2.Entity())], - more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS, + more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, ) ) # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -585,15 +585,15 @@ def test_run_query_w_namespace_nonempty_result(self): response = ds_api.run_query(project, partition_id, read_options, query=query_pb) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "runQuery") request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id) - self.assertEqual(request.query, query_pb) + self.assertEqual(request.partition_id, partition_id._pb) + self.assertEqual(request.query, query_pb._pb) def test_begin_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" transaction = b"TRANSACTION" @@ -602,7 +602,7 @@ def test_begin_transaction(self): # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -617,7 +617,7 @@ def test_begin_transaction(self): response = ds_api.begin_transaction(project) # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "beginTransaction") request = _verify_protobuf_call( @@ -627,22 +627,22 @@ def test_begin_transaction(self): self.assertEqual(request.project_id, u"") def test_commit_wo_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb project = "PROJECT" key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() - mutation = req_pb.mutations.add() + mutation = req_pb._pb.mutations.add() insert = mutation.upsert - insert.key.CopyFrom(key_pb) + insert.key.CopyFrom(key_pb._pb) value_pb = _new_value_pb(insert, "foo") value_pb.string_value = u"Foo" # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -655,35 +655,35 @@ def test_commit_wo_transaction(self): # Make request. rq_class = datastore_pb2.CommitRequest ds_api = self._make_one(client) - mode = rq_class.NON_TRANSACTIONAL + mode = rq_class.Mode.NON_TRANSACTIONAL result = ds_api.commit(project, mode, [mutation]) # Check the result and verify the callers. - self.assertEqual(result, rsp_pb) + self.assertEqual(result, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "commit") request = _verify_protobuf_call(http, uri, rq_class()) self.assertEqual(request.transaction, b"") self.assertEqual(list(request.mutations), [mutation]) - self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) + self.assertEqual(request.mode, rq_class.Mode.NON_TRANSACTIONAL) def test_commit_w_transaction(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb project = "PROJECT" key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() - mutation = req_pb.mutations.add() + mutation = req_pb._pb.mutations.add() insert = mutation.upsert - insert.key.CopyFrom(key_pb) + insert.key.CopyFrom(key_pb._pb) value_pb = _new_value_pb(insert, "foo") value_pb.string_value = u"Foo" # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -696,20 +696,20 @@ def test_commit_w_transaction(self): # Make request. rq_class = datastore_pb2.CommitRequest ds_api = self._make_one(client) - mode = rq_class.TRANSACTIONAL + mode = rq_class.Mode.TRANSACTIONAL result = ds_api.commit(project, mode, [mutation], transaction=b"xact") # Check the result and verify the callers. 
- self.assertEqual(result, rsp_pb) + self.assertEqual(result, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "commit") request = _verify_protobuf_call(http, uri, rq_class()) self.assertEqual(request.transaction, b"xact") self.assertEqual(list(request.mutations), [mutation]) - self.assertEqual(request.mode, rq_class.TRANSACTIONAL) + self.assertEqual(request.mode, rq_class.Mode.TRANSACTIONAL) def test_rollback_ok(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" transaction = b"xact" @@ -717,7 +717,7 @@ def test_rollback_ok(self): # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -732,21 +732,21 @@ def test_rollback_ok(self): response = ds_api.rollback(project, transaction) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "rollback") request = _verify_protobuf_call(http, uri, datastore_pb2.RollbackRequest()) self.assertEqual(request.transaction, transaction) def test_allocate_ids_empty(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" rsp_pb = datastore_pb2.AllocateIdsResponse() # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -761,7 +761,7 @@ def test_allocate_ids_empty(self): response = ds_api.allocate_ids(project, []) # Check the result and verify the callers. - self.assertEqual(response, rsp_pb) + self.assertEqual(response, rsp_pb._pb) self.assertEqual(list(response.keys), []) uri = _build_expected_url(client._base_url, project, "allocateIds") @@ -769,7 +769,7 @@ def test_allocate_ids_empty(self): self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" before_key_pbs = [ @@ -778,12 +778,12 @@ def test_allocate_ids_non_empty(self): ] after_key_pbs = [_make_key_pb(project), _make_key_pb(project, id_=2345)] rsp_pb = datastore_pb2.AllocateIdsResponse() - rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) - rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) + rsp_pb._pb.keys.add().CopyFrom(after_key_pbs[0]._pb) + rsp_pb._pb.keys.add().CopyFrom(after_key_pbs[1]._pb) # Create mock HTTP and client with response. http = _make_requests_session( - [_make_response(content=rsp_pb.SerializeToString())] + [_make_response(content=rsp_pb._pb.SerializeToString())] ) client_info = _make_client_info() client = mock.Mock( @@ -798,8 +798,8 @@ def test_allocate_ids_non_empty(self): response = ds_api.allocate_ids(project, before_key_pbs) # Check the result and verify the callers. 
- self.assertEqual(list(response.keys), after_key_pbs) - self.assertEqual(response, rsp_pb) + self.assertEqual(list(response.keys), [i._pb for i in after_key_pbs]) + self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "allocateIds") request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest()) @@ -863,5 +863,5 @@ def _verify_protobuf_call(http, expected_url, pb): ) data = http.request.mock_calls[0][2]["data"] - pb.ParseFromString(data) + pb._pb.ParseFromString(data) return pb diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index 7ad2aeab4d33..78c1db20e37b 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -42,7 +42,7 @@ def test_ctor(self): self.assertEqual(batch._partial_key_entities, []) def test_current(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" client = _Client(project) @@ -63,8 +63,15 @@ def test_current(self): commit_method = client._datastore_api.commit self.assertEqual(commit_method.call_count, 2) - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL - commit_method.assert_called_with(project, mode, [], transaction=None) + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + } + ) def test_put_entity_wo_key(self): project = "PROJECT" @@ -213,7 +220,7 @@ def test_rollback_wrong_status(self): self.assertRaises(ValueError, batch.rollback) def test_commit(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" client = _Client(project) @@ -226,11 +233,18 @@ def test_commit(self): self.assertEqual(batch._status, batch._FINISHED) commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL - commit_method.assert_called_with(project, mode, [], transaction=None) + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + } + ) def test_commit_w_timeout(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" client = _Client(project) @@ -244,13 +258,19 @@ def test_commit_w_timeout(self): self.assertEqual(batch._status, batch._FINISHED) commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL commit_method.assert_called_with( - project, mode, [], transaction=None, timeout=timeout + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + }, + timeout=timeout, ) def test_commit_w_retry(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" client = _Client(project) @@ -264,9 +284,15 @@ def test_commit_w_retry(self): self.assertEqual(batch._status, batch._FINISHED) commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL + mode = 
datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL commit_method.assert_called_with( - project, mode, [], transaction=None, retry=retry + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + }, + retry=retry, ) def test_commit_wrong_status(self): @@ -278,7 +304,7 @@ def test_commit_wrong_status(self): self.assertRaises(ValueError, batch.commit) def test_commit_w_partial_key_entities(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" new_id = 1234 @@ -296,13 +322,20 @@ def test_commit_w_partial_key_entities(self): batch.commit() self.assertEqual(batch._status, batch._FINISHED) - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL - ds_api.commit.assert_called_once_with(project, mode, [], transaction=None) + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + ds_api.commit.assert_called_once_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + } + ) self.assertFalse(entity.key.is_partial) self.assertEqual(entity.key._id, new_id) def test_as_context_mgr_wo_error(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" properties = {"foo": "bar"} @@ -321,13 +354,18 @@ def test_as_context_mgr_wo_error(self): mutated_entity = _mutated_pb(self, batch.mutations, "upsert") self.assertEqual(mutated_entity.key, key._key) commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL commit_method.assert_called_with( - project, mode, batch.mutations, transaction=None + request={ + "project_id": project, + "mode": mode, + "mutations": batch.mutations, + "transaction": None, + } ) def test_as_context_mgr_nested(self): - from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" properties = {"foo": "bar"} @@ -358,12 +396,22 @@ def test_as_context_mgr_nested(self): commit_method = client._datastore_api.commit self.assertEqual(commit_method.call_count, 2) - mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL commit_method.assert_called_with( - project, mode, batch1.mutations, transaction=None + request={ + "project_id": project, + "mode": mode, + "mutations": batch1.mutations, + "transaction": None, + } ) commit_method.assert_called_with( - project, mode, batch2.mutations, transaction=None + request={ + "project_id": project, + "mode": mode, + "mutations": batch2.mutations, + "transaction": None, + } ) def test_as_context_mgr_w_error(self): @@ -415,8 +463,8 @@ def _call_fut(self, commit_response_pb): return _parse_commit_response(commit_response_pb) def test_it(self): - from google.cloud.datastore_v1.proto import datastore_pb2 - from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 index_updates = 1337 keys = [ @@ -428,7 +476,7 @@ def test_it(self): index_updates=index_updates, ) result = self._call_fut(response) - self.assertEqual(result, (index_updates, keys)) + self.assertEqual(result, (index_updates, [i._pb for i in keys])) class _Entity(dict): @@ -452,13 +500,13 @@ def is_partial(self): return self._id is None 
     def to_protobuf(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         key = self._key = entity_pb2.Key()
         # Don't assign it, because it will just get ripped out
         # key.partition_id.project_id = self.project
 
-        element = key.path.add()
+        element = key._pb.path.add()
         element.kind = self._kind
         if self._id is not None:
             element.id = self._id
@@ -504,25 +552,25 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type):
     # We grab the only mutation.
     mutated_pb = mutation_pb_list[0]
     # Then check if it is the correct type.
-    test_case.assertEqual(mutated_pb.WhichOneof("operation"), mutation_type)
+    test_case.assertEqual(mutated_pb._pb.WhichOneof("operation"), mutation_type)
 
     return getattr(mutated_pb, mutation_type)
 
 
 def _make_mutation(id_):
-    from google.cloud.datastore_v1.proto import datastore_pb2
-    from google.cloud.datastore_v1.proto import entity_pb2
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
 
     key = entity_pb2.Key()
     key.partition_id.project_id = "PROJECT"
-    elem = key.path.add()
+    elem = key._pb.path.add()
     elem.kind = "Kind"
     elem.id = id_
     return datastore_pb2.MutationResult(key=key)
 
 
 def _make_commit_response(*new_key_ids):
-    from google.cloud.datastore_v1.proto import datastore_pb2
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
     mutation_results = [_make_mutation(key_id) for key_id in new_key_ids]
     return datastore_pb2.CommitResponse(mutation_results=mutation_results)
diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py
index 61f8af7b7bf9..59588f10ade7 100644
--- a/packages/google-cloud-datastore/tests/unit/test_client.py
+++ b/packages/google-cloud-datastore/tests/unit/test_client.py
@@ -24,12 +24,12 @@ def _make_credentials():
 
 
 def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
-    from google.cloud.datastore_v1.proto import entity_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
     from google.cloud.datastore.helpers import _new_value_pb
 
     entity_pb = entity_pb2.Entity()
     entity_pb.key.partition_id.project_id = project
-    path_element = entity_pb.key.path.add()
+    path_element = entity_pb._pb.key.path.add()
     path_element.kind = kind
     path_element.id = integer_id
     if name is not None and str_val is not None:
@@ -420,7 +420,7 @@ def test_get_multi_no_keys(self):
         self.assertEqual(results, [])
 
     def test_get_multi_miss(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
         from google.cloud.datastore.key import Key
 
         creds = _make_credentials()
@@ -434,12 +434,16 @@ def test_get_multi_miss(self):
 
         read_options = datastore_pb2.ReadOptions()
         ds_api.lookup.assert_called_once_with(
-            self.PROJECT, [key.to_protobuf()], read_options=read_options
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key.to_protobuf()],
+                "read_options": read_options,
+            }
         )
 
     def test_get_multi_miss_w_missing(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.key import Key
 
         KIND = "Kind"
@@ -448,14 +452,14 @@ def test_get_multi_miss_w_missing(self):
         # Make a missing entity pb to be returned from mock backend.
        missed = entity_pb2.Entity()
         missed.key.partition_id.project_id = self.PROJECT
-        path_element = missed.key.path.add()
+        path_element = missed._pb.key.path.add()
         path_element.kind = KIND
         path_element.id = ID
 
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
         # Set missing entity on mock connection.
-        lookup_response = _make_lookup_response(missing=[missed])
+        lookup_response = _make_lookup_response(missing=[missed._pb])
         ds_api = _make_datastore_api(lookup_response=lookup_response)
         client._datastore_api_internal = ds_api
 
@@ -464,11 +468,15 @@ def test_get_multi_miss_w_missing(self):
         entities = client.get_multi([key], missing=missing)
         self.assertEqual(entities, [])
         key_pb = key.to_protobuf()
-        self.assertEqual([missed.key.to_protobuf() for missed in missing], [key_pb])
+        self.assertEqual([missed.key.to_protobuf() for missed in missing], [key_pb._pb])
 
         read_options = datastore_pb2.ReadOptions()
         ds_api.lookup.assert_called_once_with(
-            self.PROJECT, [key_pb], read_options=read_options
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key_pb],
+                "read_options": read_options,
+            }
         )
 
     def test_get_multi_w_missing_non_empty(self):
@@ -492,7 +500,7 @@ def test_get_multi_w_deferred_non_empty(self):
         self.assertRaises(ValueError, client.get_multi, [key], deferred=deferred)
 
     def test_get_multi_miss_w_deferred(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
         from google.cloud.datastore.key import Key
 
         key = Key("Kind", 1234, project=self.PROJECT)
@@ -512,12 +520,16 @@ def test_get_multi_miss_w_deferred(self):
 
         read_options = datastore_pb2.ReadOptions()
         ds_api.lookup.assert_called_once_with(
-            self.PROJECT, [key_pb], read_options=read_options
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key_pb],
+                "read_options": read_options,
+            }
         )
 
     def test_get_multi_w_deferred_from_backend_but_not_passed(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.key import Key
 
@@ -527,9 +539,9 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self):
         key2_pb = key2.to_protobuf()
 
         entity1_pb = entity_pb2.Entity()
-        entity1_pb.key.CopyFrom(key1_pb)
+        entity1_pb._pb.key.CopyFrom(key1_pb._pb)
         entity2_pb = entity_pb2.Entity()
-        entity2_pb.key.CopyFrom(key2_pb)
+        entity2_pb._pb.key.CopyFrom(key2_pb._pb)
 
         creds = _make_credentials()
         client = self._make_one(credentials=creds)
@@ -561,15 +573,25 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self):
         self.assertEqual(ds_api.lookup.call_count, 2)
         read_options = datastore_pb2.ReadOptions()
+
         ds_api.lookup.assert_any_call(
-            self.PROJECT, [key2_pb], read_options=read_options
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key2_pb],
+                "read_options": read_options,
+            },
         )
+
         ds_api.lookup.assert_any_call(
-            self.PROJECT, [key1_pb, key2_pb], read_options=read_options
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key1_pb, key2_pb],
+                "read_options": read_options,
+            },
         )
 
     def test_get_multi_hit_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
         from google.cloud.datastore.key import Key
 
         kind = "Kind"
@@ -600,16 +622,19 @@ def test_get_multi_hit_w_retry_w_timeout(self):
         self.assertEqual(result["foo"], "Foo")
 
         read_options = datastore_pb2.ReadOptions()
+
         ds_api.lookup.assert_called_once_with(
-            self.PROJECT,
-            [key.to_protobuf()],
-            read_options=read_options,
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key.to_protobuf()],
+                "read_options": read_options,
+            },
             retry=retry,
             timeout=timeout,
         )
 
     def test_get_multi_hit_w_transaction(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
         from google.cloud.datastore.key import Key
 
         txn_id = b"123"
@@ -642,11 +667,15 @@ def test_get_multi_hit_w_transaction(self):
 
         read_options = datastore_pb2.ReadOptions(transaction=txn_id)
         ds_api.lookup.assert_called_once_with(
-            self.PROJECT, [key.to_protobuf()], read_options=read_options
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key.to_protobuf()],
+                "read_options": read_options,
+            }
         )
 
     def test_get_multi_hit_multiple_keys_same_project(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
         from google.cloud.datastore.key import Key
 
         kind = "Kind"
@@ -676,9 +705,11 @@ def test_get_multi_hit_multiple_keys_same_project(self):
 
         read_options = datastore_pb2.ReadOptions()
         ds_api.lookup.assert_called_once_with(
-            self.PROJECT,
-            [key1.to_protobuf(), key2.to_protobuf()],
-            read_options=read_options,
+            request={
+                "project_id": self.PROJECT,
+                "keys": [key1.to_protobuf(), key2.to_protobuf()],
+                "read_options": read_options,
+            }
         )
 
     def test_get_multi_hit_multiple_keys_different_project(self):
@@ -770,7 +801,7 @@ def test_put_multi_w_single_empty_entity(self):
         self.assertRaises(ValueError, client.put_multi, Entity())
 
     def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
         from google.cloud.datastore.helpers import _property_tuples
 
         entity = _Entity(foo=u"bar")
@@ -789,14 +820,21 @@ def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self):
         self.assertEqual(ds_api.commit.call_count, 1)
         _, positional, keyword = ds_api.commit.mock_calls[0]
 
-        expected_kw = {"transaction": None, "retry": retry, "timeout": timeout}
-        self.assertEqual(keyword, expected_kw)
-        self.assertEqual(len(positional), 3)
-        self.assertEqual(positional[0], self.PROJECT)
-        self.assertEqual(positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL)
+        self.assertEqual(len(positional), 0)
+
+        self.assertEqual(len(keyword), 3)
+        self.assertEqual(keyword["retry"], retry)
+        self.assertEqual(keyword["timeout"], timeout)
 
-        mutations = positional[2]
+        self.assertEqual(len(keyword["request"]), 4)
+        self.assertEqual(keyword["request"]["project_id"], self.PROJECT)
+        self.assertEqual(
+            keyword["request"]["mode"],
+            datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL,
+        )
+        self.assertEqual(keyword["request"]["transaction"], None)
+        mutations = keyword["request"]["mutations"]
         mutated_entity = _mutated_pb(self, mutations, "insert")
         self.assertEqual(mutated_entity.key, key.to_protobuf())
@@ -859,7 +897,7 @@ def test_delete_multi_no_keys(self):
         client._datastore_api_internal.commit.assert_not_called()
 
     def test_delete_multi_no_batch_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         key = _Key()
         retry = mock.Mock()
@@ -875,14 +913,21 @@ def test_delete_multi_no_batch_w_retry_w_timeout(self):
         self.assertEqual(ds_api.commit.call_count, 1)
         _, positional, keyword = ds_api.commit.mock_calls[0]
 
-        expected_kw = {"transaction": None, "retry": retry, "timeout": timeout}
-        self.assertEqual(keyword, expected_kw)
-        self.assertEqual(len(positional), 3)
-        self.assertEqual(positional[0], self.PROJECT)
-        self.assertEqual(positional[1], datastore_pb2.CommitRequest.NON_TRANSACTIONAL)
+        self.assertEqual(len(positional), 0)
+
+        self.assertEqual(len(keyword), 3)
+        self.assertEqual(keyword["retry"], retry)
+        self.assertEqual(keyword["timeout"], timeout)
 
-        mutations = positional[2]
+        self.assertEqual(len(keyword["request"]), 4)
+        self.assertEqual(keyword["request"]["project_id"], self.PROJECT)
+        self.assertEqual(
+            keyword["request"]["mode"],
+            datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL,
+        )
+        self.assertEqual(keyword["request"]["transaction"], None)
+        mutations = keyword["request"]["mutations"]
         mutated_key = _mutated_pb(self, mutations, "delete")
         self.assertEqual(mutated_key, key.to_protobuf())
@@ -934,7 +979,9 @@ def test_allocate_ids_w_partial_key(self):
         self.assertEqual([key.id for key in result], list(range(num_ids)))
 
         expected_keys = [incomplete_key.to_protobuf()] * num_ids
-        alloc_ids.assert_called_once_with(self.PROJECT, expected_keys)
+        alloc_ids.assert_called_once_with(
+            request={"project_id": self.PROJECT, "keys": expected_keys}
+        )
 
     def test_allocate_ids_w_partial_key_w_retry_w_timeout(self):
         num_ids = 2
@@ -959,7 +1006,9 @@ def test_allocate_ids_w_partial_key_w_retry_w_timeout(self):
 
         expected_keys = [incomplete_key.to_protobuf()] * num_ids
         alloc_ids.assert_called_once_with(
-            self.PROJECT, expected_keys, retry=retry, timeout=timeout
+            request={"project_id": self.PROJECT, "keys": expected_keys},
+            retry=retry,
+            timeout=timeout,
         )
 
     def test_allocate_ids_w_completed_key(self):
@@ -986,7 +1035,9 @@ def test_reserve_ids_sequential_w_completed_key(self):
             for id in range(complete_key.id, complete_key.id + num_ids)
         )
         expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(self.PROJECT, expected_keys)
+        reserve_ids.assert_called_once_with(
+            request={"project_id": self.PROJECT, "keys": expected_keys}
+        )
 
     def test_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(self):
         num_ids = 2
@@ -1011,7 +1062,9 @@ def test_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(self):
         )
         expected_keys = [key.to_protobuf() for key in reserved_keys]
         reserve_ids.assert_called_once_with(
-            self.PROJECT, expected_keys, retry=retry, timeout=timeout
+            request={"project_id": self.PROJECT, "keys": expected_keys},
+            retry=retry,
+            timeout=timeout,
         )
 
     def test_reserve_ids_sequential_w_completed_key_w_ancestor(self):
@@ -1031,7 +1084,9 @@ def test_reserve_ids_sequential_w_completed_key_w_ancestor(self):
             for id in range(complete_key.id, complete_key.id + num_ids)
         )
         expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(self.PROJECT, expected_keys)
+        reserve_ids.assert_called_once_with(
+            request={"project_id": self.PROJECT, "keys": expected_keys}
+        )
 
     def test_reserve_ids_sequential_w_partial_key(self):
         num_ids = 2
@@ -1074,7 +1129,9 @@ def test_reserve_ids_w_completed_key(self):
             for id in range(complete_key.id, complete_key.id + num_ids)
         )
         expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(self.PROJECT, expected_keys)
+        reserve_ids.assert_called_once_with(
+            request={"project_id": self.PROJECT, "keys": expected_keys}
+        )
 
     def test_reserve_ids_w_completed_key_w_retry_w_timeout(self):
         num_ids = 2
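Note: the change exercised above recurs through the rest of this patch. The microgenerator-based `datastore_v1` surface takes a single `request` mapping (or the corresponding request object) plus optional `retry`/`timeout` keywords, where the old GAPIC surface took flattened positional arguments. Below is a minimal sketch of the new calling convention against a plain mock; `ds_api` and the placeholder key strings are illustrative stand-ins, not names from the patch itself.

    from unittest import mock

    # Stand-in for the autospec'd GAPIC stub the tests attach to the client.
    ds_api = mock.Mock(name="datastore_api")

    project = "PROJECT"
    expected_keys = ["key-pb-1", "key-pb-2"]  # placeholders for entity_pb2.Key messages

    # New surface: one ``request`` mapping, with retry/timeout as keywords.
    ds_api.reserve_ids(request={"project_id": project, "keys": expected_keys})

    # The assertion style used throughout the migrated tests.
    ds_api.reserve_ids.assert_called_once_with(
        request={"project_id": project, "keys": expected_keys}
    )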
@@ -1097,7 +1154,9 @@ def test_reserve_ids_w_completed_key_w_retry_w_timeout(self):
         )
         expected_keys = [key.to_protobuf() for key in reserved_keys]
         reserve_ids.assert_called_once_with(
-            self.PROJECT, expected_keys, retry=retry, timeout=timeout
+            request={"project_id": self.PROJECT, "keys": expected_keys},
+            retry=retry,
+            timeout=timeout,
         )
 
     def test_reserve_ids_w_completed_key_w_ancestor(self):
@@ -1117,7 +1176,9 @@ def test_reserve_ids_w_completed_key_w_ancestor(self):
             for id in range(complete_key.id, complete_key.id + num_ids)
         )
         expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(self.PROJECT, expected_keys)
+        reserve_ids.assert_called_once_with(
+            request={"project_id": self.PROJECT, "keys": expected_keys}
+        )
 
     def test_reserve_ids_w_partial_key(self):
         num_ids = 2
@@ -1155,7 +1216,9 @@ def test_reserve_ids_multi(self):
         client.reserve_ids_multi([key1, key2])
 
         expected_keys = [key1.to_protobuf(), key2.to_protobuf()]
-        reserve_ids.assert_called_once_with(self.PROJECT, expected_keys)
+        reserve_ids.assert_called_once_with(
+            request={"project_id": self.PROJECT, "keys": expected_keys}
+        )
 
     def test_reserve_ids_multi_w_partial_key(self):
         incomplete_key = _Key(_Key.kind, None)
@@ -1252,9 +1315,9 @@ def test_read_only_transaction_defaults(self):
         self.assertEqual(
             xact._options, TransactionOptions(read_only=TransactionOptions.ReadOnly())
         )
-        self.assertFalse(xact._options.HasField("read_write"))
-        self.assertTrue(xact._options.HasField("read_only"))
-        self.assertEqual(xact._options.read_only, TransactionOptions.ReadOnly())
+        self.assertFalse(xact._options._pb.HasField("read_write"))
+        self.assertTrue(xact._options._pb.HasField("read_only"))
+        self.assertEqual(xact._options._pb.read_only, TransactionOptions.ReadOnly()._pb)
 
     def test_query_w_client(self):
         KIND = "KIND"
@@ -1426,13 +1489,13 @@ def is_partial(self):
         return self.id is None and self.name is None
 
     def to_protobuf(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         key = self._key = entity_pb2.Key()
 
         path = self._flat_path
         while path:
-            element = key.path.add()
+            element = key._pb.path.add()
             kind, id_or_name = path[:2]
             element.kind = kind
             if isinstance(id_or_name, int):
@@ -1476,22 +1539,22 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type):
     # We grab the only mutation.
     mutated_pb = mutation_pb_list[0]
     # Then check if it is the correct type.
-    test_case.assertEqual(mutated_pb.WhichOneof("operation"), mutation_type)
+    test_case.assertEqual(mutated_pb._pb.WhichOneof("operation"), mutation_type)
 
     return getattr(mutated_pb, mutation_type)
 
 
 def _make_key(id_):
-    from google.cloud.datastore_v1.proto import entity_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
 
     key = entity_pb2.Key()
-    elem = key.path.add()
+    elem = key._pb.path.add()
     elem.id = id_
     return key
 
 
 def _make_commit_response(*keys):
-    from google.cloud.datastore_v1.proto import datastore_pb2
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
     mutation_results = [datastore_pb2.MutationResult(key=key) for key in keys]
     return datastore_pb2.CommitResponse(mutation_results=mutation_results)
diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py
index 995d9cfa2330..81cae0f3bdf1 100644
--- a/packages/google-cloud-datastore/tests/unit/test_helpers.py
+++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py
@@ -22,15 +22,15 @@ def _call_fut(self, entity_pb, name):
         return _new_value_pb(entity_pb, name)
 
     def test_it(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         entity_pb = entity_pb2.Entity()
         name = "foo"
         result = self._call_fut(entity_pb, name)
 
-        self.assertIsInstance(result, entity_pb2.Value)
-        self.assertEqual(len(entity_pb.properties), 1)
-        self.assertEqual(entity_pb.properties[name], result)
+        self.assertIsInstance(result, type(entity_pb2.Value()._pb))
+        self.assertEqual(len(entity_pb._pb.properties), 1)
+        self.assertEqual(entity_pb._pb.properties[name], result)
 
 
 class Test__property_tuples(unittest.TestCase):
@@ -41,7 +41,7 @@ def _call_fut(self, entity_pb):
 
     def test_it(self):
         import types
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
 
         entity_pb = entity_pb2.Entity()
@@ -62,7 +62,7 @@ def _call_fut(self, val):
         return entity_from_protobuf(val)
 
     def test_it(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
 
         _PROJECT = "PROJECT"
@@ -70,7 +70,7 @@ def test_it(self):
         _ID = 1234
         entity_pb = entity_pb2.Entity()
         entity_pb.key.partition_id.project_id = _PROJECT
-        entity_pb.key.path.add(kind=_KIND, id=_ID)
+        entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
 
         value_pb = _new_value_pb(entity_pb, "foo")
         value_pb.string_value = "Foo"
@@ -92,7 +92,7 @@ def test_it(self):
         indexed_array_val_pb = array_pb2.add()
         indexed_array_val_pb.integer_value = 12
 
-        entity = self._call_fut(entity_pb)
+        entity = self._call_fut(entity_pb._pb)
         self.assertEqual(entity.kind, _KIND)
         self.assertEqual(entity.exclude_from_indexes, frozenset(["bar", "baz"]))
         entity_props = dict(entity)
@@ -108,7 +108,7 @@ def test_it(self):
         self.assertEqual(key.id, _ID)
 
     def test_mismatched_value_indexed(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
 
         _PROJECT = "PROJECT"
@@ -116,7 +116,7 @@ def test_mismatched_value_indexed(self):
         _ID = 1234
         entity_pb = entity_pb2.Entity()
         entity_pb.key.partition_id.project_id = _PROJECT
-        entity_pb.key.path.add(kind=_KIND, id=_ID)
+        entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
 
         array_val_pb = _new_value_pb(entity_pb, "baz")
        array_pb = array_val_pb.array_value.values
@@ -129,19 +129,19 @@ def test_mismatched_value_indexed(self):
         unindexed_value_pb2.integer_value = 11
 
         with self.assertRaises(ValueError):
-            self._call_fut(entity_pb)
+            self._call_fut(entity_pb._pb)
 
     def test_entity_no_key(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         entity_pb = entity_pb2.Entity()
-        entity = self._call_fut(entity_pb)
+        entity = self._call_fut(entity_pb._pb)
 
         self.assertIsNone(entity.key)
         self.assertEqual(dict(entity), {})
 
     def test_entity_with_meaning(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
 
         entity_pb = entity_pb2.Entity()
@@ -156,7 +156,7 @@ def test_entity_with_meaning(self):
         self.assertEqual(entity._meanings, {name: (meaning, val)})
 
     def test_nested_entity_no_key(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
 
         PROJECT = "FOO"
@@ -171,13 +171,13 @@ def test_nested_entity_no_key(self):
 
         entity_pb = entity_pb2.Entity()
         entity_pb.key.partition_id.project_id = PROJECT
-        element = entity_pb.key.path.add()
+        element = entity_pb._pb.key.path.add()
         element.kind = KIND
         outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME)
-        outside_val_pb.entity_value.CopyFrom(entity_inside)
+        outside_val_pb.entity_value.CopyFrom(entity_inside._pb)
 
-        entity = self._call_fut(entity_pb)
+        entity = self._call_fut(entity_pb._pb)
         self.assertEqual(entity.key.project, PROJECT)
         self.assertEqual(entity.key.flat_path, (KIND,))
         self.assertEqual(len(entity), 1)
@@ -188,7 +188,7 @@ def test_nested_entity_no_key(self):
         self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE)
 
     def test_index_mismatch_ignores_empty_list(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         _PROJECT = "PROJECT"
         _KIND = "KIND"
@@ -198,9 +198,9 @@ def test_index_mismatch_ignores_empty_list(self):
 
         entity_pb = entity_pb2.Entity(properties={"baz": array_val_pb})
         entity_pb.key.partition_id.project_id = _PROJECT
-        entity_pb.key.path.add(kind=_KIND, id=_ID)
+        entity_pb.key._pb.path.add(kind=_KIND, id=_ID)
 
-        entity = self._call_fut(entity_pb)
+        entity = self._call_fut(entity_pb._pb)
         entity_dict = dict(entity)
         self.assertEqual(entity_dict["baz"], [])
 
@@ -222,14 +222,14 @@ def _compare_entity_proto(self, entity_pb1, entity_pb2):
             name1, val1 = pair1
             name2, val2 = pair2
             self.assertEqual(name1, name2)
-            if val1.HasField("entity_value"):  # Message field (Entity)
+            if val1._pb.HasField("entity_value"):  # Message field (Entity)
                 self.assertEqual(val1.meaning, val2.meaning)
                 self._compare_entity_proto(val1.entity_value, val2.entity_value)
             else:
                 self.assertEqual(val1, val2)
 
     def test_empty(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
@@ -237,7 +237,7 @@ def test_empty(self):
         self._compare_entity_proto(entity_pb, entity_pb2.Entity())
 
     def test_key_only(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.key import Key
 
@@ -249,14 +249,14 @@ def test_key_only(self):
         expected_pb = entity_pb2.Entity()
         expected_pb.key.partition_id.project_id = project
-        path_elt = expected_pb.key.path.add()
+        path_elt = expected_pb._pb.key.path.add()
         path_elt.kind = kind
         path_elt.name = name
 
         self._compare_entity_proto(entity_pb, expected_pb)
 
     def test_simple_fields(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb
@@ -276,7 +276,7 @@ def test_simple_fields(self):
         self._compare_entity_proto(entity_pb, expected_pb)
 
     def test_with_empty_list(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
@@ -284,23 +284,23 @@ def test_with_empty_list(self):
 
         entity_pb = self._call_fut(entity)
         expected_pb = entity_pb2.Entity()
-        prop = expected_pb.properties.get_or_create("foo")
-        prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[]))
+        prop = expected_pb._pb.properties.get_or_create("foo")
+        prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[])._pb)
 
         self._compare_entity_proto(entity_pb, expected_pb)
 
     def test_inverts_to_protobuf(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import _new_value_pb
         from google.cloud.datastore.helpers import entity_from_protobuf
 
         original_pb = entity_pb2.Entity()
         # Add a key.
         original_pb.key.partition_id.project_id = project = "PROJECT"
-        elem1 = original_pb.key.path.add()
+        elem1 = original_pb._pb.key.path.add()
         elem1.kind = "Family"
         elem1.id = 1234
-        elem2 = original_pb.key.path.add()
+        elem2 = original_pb._pb.key.path.add()
         elem2.kind = "King"
         elem2.name = "Spades"
 
@@ -320,7 +320,7 @@ def test_inverts_to_protobuf(self):
         sub_val_pb2 = _new_value_pb(sub_pb, "y")
         sub_val_pb2.double_value = 2.718281828
         val_pb3.meaning = 9
-        val_pb3.entity_value.CopyFrom(sub_pb)
+        val_pb3.entity_value.CopyFrom(sub_pb._pb)
 
         # Add a list property.
        val_pb4 = _new_value_pb(original_pb, "list-quux")
@@ -343,7 +343,7 @@ def test_inverts_to_protobuf(self):
         self._compare_entity_proto(original_pb, new_pb)
 
     def test_meaning_with_change(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb
@@ -361,7 +361,7 @@ def test_meaning_with_change(self):
         self._compare_entity_proto(entity_pb, expected_pb)
 
     def test_variable_meanings(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb
@@ -387,7 +387,7 @@ def test_variable_meanings(self):
         self._compare_entity_proto(entity_pb, expected_pb)
 
     def test_dict_to_entity(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
@@ -406,7 +406,7 @@ def test_dict_to_entity(self):
         self.assertEqual(entity_pb, expected_pb)
 
     def test_dict_to_entity_recursive(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
 
         entity = Entity()
@@ -445,7 +445,7 @@ def _call_fut(self, val):
         return key_from_protobuf(val)
 
     def _makePB(self, project=None, namespace=None, path=()):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         pb = entity_pb2.Key()
         if project is not None:
@@ -453,7 +453,7 @@ def _makePB(self, project=None, namespace=None, path=()):
         if namespace is not None:
             pb.partition_id.namespace_id = namespace
         for elem in path:
-            added = pb.path.add()
+            added = pb._pb.path.add()
             added.kind = elem["kind"]
             if "id" in elem:
                 added.id = elem["id"]
@@ -504,16 +504,16 @@ def test_eventual_w_transaction(self):
             self._call_fut(True, b"123")
 
     def test_eventual_wo_transaction(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         read_options = self._call_fut(True, None)
         expected = datastore_pb2.ReadOptions(
-            read_consistency=datastore_pb2.ReadOptions.EVENTUAL
+            read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
         )
         self.assertEqual(read_options, expected)
 
     def test_default_w_transaction(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         txn_id = b"123abc-easy-as"
         read_options = self._call_fut(False, txn_id)
@@ -521,7 +521,7 @@ def test_default_w_transaction(self):
         self.assertEqual(read_options, expected)
 
     def test_default_wo_transaction(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         read_options = self._call_fut(False, None)
         expected = datastore_pb2.ReadOptions()
@@ -589,13 +589,9 @@ def test_long(self):
         self.assertEqual(value, must_be_long)
 
     def test_native_str(self):
-        import six
-
         name, value = self._call_fut("str")
-        if six.PY2:
-            self.assertEqual(name, "blob_value")
-        else:  # pragma: NO COVER Python 3
-            self.assertEqual(name, "string_value")
+
+        self.assertEqual(name, "string_value")
         self.assertEqual(value, "str")
 
     def test_bytes(self):
@@ -664,7 +660,7 @@ def _call_fut(self, pb):
         return _get_value_from_value_pb(pb)
 
     def _makePB(self, attr_name, value):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         pb = entity_pb2.Value()
         setattr(pb, attr_name, value)
@@ -674,22 +670,22 @@ def test_datetime(self):
         import calendar
         import datetime
         from google.cloud._helpers import UTC
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         micros = 4375
         utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
         pb = entity_pb2.Value()
-        pb.timestamp_value.seconds = calendar.timegm(utc.timetuple())
-        pb.timestamp_value.nanos = 1000 * micros
+        pb._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple())
+        pb._pb.timestamp_value.nanos = 1000 * micros
         self.assertEqual(self._call_fut(pb), utc)
 
     def test_key(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.key import Key
 
         pb = entity_pb2.Value()
         expected = Key("KIND", 1234, project="PROJECT").to_protobuf()
-        pb.key_value.CopyFrom(expected)
+        pb.key_value._pb.CopyFrom(expected._pb)
         found = self._call_fut(pb)
         self.assertEqual(found.to_protobuf(), expected)
@@ -714,13 +710,13 @@ def test_unicode(self):
         self.assertEqual(self._call_fut(pb), u"str")
 
     def test_entity(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.entity import Entity
         from google.cloud.datastore.helpers import _new_value_pb
 
         pb = entity_pb2.Value()
         entity_pb = pb.entity_value
-        entity_pb.key.path.add(kind="KIND")
+        entity_pb._pb.key.path.add(kind="KIND")
         entity_pb.key.partition_id.project_id = "PROJECT"
 
         value_pb = _new_value_pb(entity_pb, "foo")
@@ -730,20 +726,20 @@ def test_entity(self):
         self.assertEqual(entity["foo"], "Foo")
 
     def test_array(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         pb = entity_pb2.Value()
         array_pb = pb.array_value.values
-        item_pb = array_pb.add()
+        item_pb = array_pb._pb.add()
         item_pb.string_value = "Foo"
-        item_pb = array_pb.add()
+        item_pb = array_pb._pb.add()
         item_pb.string_value = "Bar"
         items = self._call_fut(pb)
         self.assertEqual(items, ["Foo", "Bar"])
 
     def test_geo_point(self):
         from google.type import latlng_pb2
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
         from google.cloud.datastore.helpers import GeoPoint
 
         lat = -3.14
@@ -757,14 +753,14 @@ def test_geo_point(self):
 
     def test_null(self):
         from google.protobuf import struct_pb2
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE)
         result = self._call_fut(pb)
         self.assertIsNone(result)
 
     def test_unknown(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         pb = entity_pb2.Value()
         with self.assertRaises(ValueError):
@@ -778,9 +774,9 @@ def _call_fut(self, value_pb, val):
         return _set_protobuf_value(value_pb, val)
 
     def _makePB(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
-        return entity_pb2.Value()
+        return entity_pb2.Value()._pb
 
     def test_datetime(self):
         import calendar
@@ -802,7 +798,7 @@ def test_key(self):
         key = Key("KIND", 1234, project="PROJECT")
         self._call_fut(pb, key)
         value = pb.key_value
-        self.assertEqual(value, key.to_protobuf())
+        self.assertEqual(value, key.to_protobuf()._pb)
 
     def test_none(self):
         pb = self._makePB()
@@ -835,14 +831,10 @@ def test_long(self):
         self.assertEqual(value, must_be_long)
 
     def test_native_str(self):
-        import six
-
         pb = self._makePB()
         self._call_fut(pb, "str")
-        if six.PY2:
-            value = pb.blob_value
-        else:  # pragma: NO COVER Python 3
-            value = pb.string_value
+
+        value = pb.string_value
         self.assertEqual(value, "str")
 
     def test_bytes(self):
@@ -881,7 +873,7 @@ def test_entity_w_key(self):
         entity[name] = value
         self._call_fut(pb, entity)
         entity_pb = pb.entity_value
-        self.assertEqual(entity_pb.key, key.to_protobuf())
+        self.assertEqual(entity_pb.key, key.to_protobuf()._pb)
 
         prop_dict = dict(_property_tuples(entity_pb))
         self.assertEqual(len(prop_dict), 1)
@@ -918,14 +910,14 @@ def _call_fut(self, *args, **kwargs):
         return _get_meaning(*args, **kwargs)
 
     def test_no_meaning(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         value_pb = entity_pb2.Value()
         result = self._call_fut(value_pb)
         self.assertIsNone(result)
 
     def test_single(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         value_pb = entity_pb2.Value()
         value_pb.meaning = meaning = 22
@@ -934,22 +926,22 @@ def test_single(self):
         self.assertEqual(meaning, result)
 
     def test_empty_array_value(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         value_pb = entity_pb2.Value()
-        value_pb.array_value.values.add()
-        value_pb.array_value.values.pop()
+        value_pb._pb.array_value.values.add()
+        value_pb._pb.array_value.values.pop()
 
         result = self._call_fut(value_pb, is_list=True)
         self.assertEqual(None, result)
 
     def test_array_value(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         value_pb = entity_pb2.Value()
         meaning = 9
-        sub_value_pb1 = value_pb.array_value.values.add()
-        sub_value_pb2 = value_pb.array_value.values.add()
+        sub_value_pb1 = value_pb._pb.array_value.values.add()
+        sub_value_pb2 = value_pb._pb.array_value.values.add()
 
         sub_value_pb1.meaning = sub_value_pb2.meaning = meaning
         sub_value_pb1.string_value = u"hi"
@@ -959,13 +951,13 @@ def test_array_value(self):
         self.assertEqual(meaning, result)
 
     def test_array_value_multiple_meanings(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         value_pb = entity_pb2.Value()
         meaning1 = 9
         meaning2 = 10
-        sub_value_pb1 = value_pb.array_value.values.add()
-        sub_value_pb2 = value_pb.array_value.values.add()
+        sub_value_pb1 = value_pb._pb.array_value.values.add()
+        sub_value_pb2 = value_pb._pb.array_value.values.add()
 
         sub_value_pb1.meaning = meaning1
         sub_value_pb2.meaning = meaning2
@@ -976,12 +968,12 @@ def test_array_value_multiple_meanings(self):
         self.assertEqual(result, [meaning1, meaning2])
 
     def test_array_value_meaning_partially_unset(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         value_pb = entity_pb2.Value()
         meaning1 = 9
-        sub_value_pb1 = value_pb.array_value.values.add()
-        sub_value_pb2 = value_pb.array_value.values.add()
+        sub_value_pb1 = value_pb._pb.array_value.values.add()
+        sub_value_pb2 = value_pb._pb.array_value.values.add()
 
         sub_value_pb1.meaning = meaning1
         sub_value_pb1.string_value = u"hi"
diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py
index 0478e2cb8a85..73565eadb412 100644
--- a/packages/google-cloud-datastore/tests/unit/test_key.py
+++ b/packages/google-cloud-datastore/tests/unit/test_key.py
@@ -345,7 +345,7 @@ def test_completed_key_on_complete(self):
         self.assertRaises(ValueError, key.completed_key, 5678)
 
     def test_to_protobuf_defaults(self):
-        from google.cloud.datastore_v1.proto import entity_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
 
         _KIND = "KIND"
         key = self._make_one(_KIND, project=self._DEFAULT_PROJECT)
diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py
index fbaadb2862c7..97c1db88c7a6 100644
--- a/packages/google-cloud-datastore/tests/unit/test_query.py
+++ b/packages/google-cloud-datastore/tests/unit/test_query.py
@@ -152,7 +152,7 @@ def _assign(val):
     def test_ancestor_setter_w_key(self):
         from google.cloud.datastore.key import Key
 
-        _NAME = u"NAME"
+        _NAME = "NAME"
         key = Key("KIND", 123, project=self._PROJECT)
         query = self._make_one(self._make_client())
         query.add_filter("name", "=", _NAME)
@@ -173,38 +173,38 @@ def test_add_filter_setter_w_unknown_operator(self):
 
     def test_add_filter_w_known_operator(self):
         query = self._make_one(self._make_client())
-        query.add_filter("firstname", "=", u"John")
-        self.assertEqual(query.filters, [("firstname", "=", u"John")])
+        query.add_filter("firstname", "=", "John")
+        self.assertEqual(query.filters, [("firstname", "=", "John")])
 
     def test_add_filter_w_all_operators(self):
         query = self._make_one(self._make_client())
-        query.add_filter("leq_prop", "<=", u"val1")
-        query.add_filter("geq_prop", ">=", u"val2")
-        query.add_filter("lt_prop", "<", u"val3")
-        query.add_filter("gt_prop", ">", u"val4")
-        query.add_filter("eq_prop", "=", u"val5")
+        query.add_filter("leq_prop", "<=", "val1")
+        query.add_filter("geq_prop", ">=", "val2")
+        query.add_filter("lt_prop", "<", "val3")
+        query.add_filter("gt_prop", ">", "val4")
+        query.add_filter("eq_prop", "=", "val5")
 
         self.assertEqual(len(query.filters), 5)
-        self.assertEqual(query.filters[0], ("leq_prop", "<=", u"val1"))
-        self.assertEqual(query.filters[1], ("geq_prop", ">=", u"val2"))
-        self.assertEqual(query.filters[2], ("lt_prop", "<", u"val3"))
-        self.assertEqual(query.filters[3], ("gt_prop", ">", u"val4"))
-        self.assertEqual(query.filters[4], ("eq_prop", "=", u"val5"))
+        self.assertEqual(query.filters[0], ("leq_prop", "<=", "val1"))
+        self.assertEqual(query.filters[1], ("geq_prop", ">=", "val2"))
+        self.assertEqual(query.filters[2], ("lt_prop", "<", "val3"))
+        self.assertEqual(query.filters[3], ("gt_prop", ">", "val4"))
+        self.assertEqual(query.filters[4], ("eq_prop", "=", "val5"))
 
     def test_add_filter_w_known_operator_and_entity(self):
         from google.cloud.datastore.entity import Entity
 
         query = self._make_one(self._make_client())
         other = Entity()
-        other["firstname"] = u"John"
-        other["lastname"] = u"Smith"
+        other["firstname"] = "John"
+        other["lastname"] = "Smith"
         query.add_filter("other", "=", other)
         self.assertEqual(query.filters, [("other", "=", other)])
 
     def test_add_filter_w_whitespace_property_name(self):
         query = self._make_one(self._make_client())
         PROPERTY_NAME = " property with lots of space "
-        query.add_filter(PROPERTY_NAME, "=", u"John")
-        self.assertEqual(query.filters, [(PROPERTY_NAME, "=", u"John")])
+        query.add_filter(PROPERTY_NAME, "=", "John")
+        self.assertEqual(query.filters, [(PROPERTY_NAME, "=", "John")])
 
     def test_add_filter___key__valid_key(self):
         from google.cloud.datastore.key import Key
@@ -218,9 +218,9 @@ def test_add_filter_return_query_obj(self):
         from google.cloud.datastore.query import Query
 
         query = self._make_one(self._make_client())
-        query_obj = query.add_filter("firstname", "=", u"John")
+        query_obj = query.add_filter("firstname", "=", "John")
         self.assertIsInstance(query_obj, Query)
-        self.assertEqual(query_obj.filters, [("firstname", "=", u"John")])
+        self.assertEqual(query_obj.filters, [("firstname", "=", "John")])
 
     def test_filter___key__not_equal_operator(self):
         from google.cloud.datastore.key import Key
@@ -429,7 +429,7 @@ def test_constructor_explicit(self):
         self.assertEqual(iterator._timeout, timeout)
 
     def test__build_protobuf_empty(self):
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
         from google.cloud.datastore.query import Query
 
         client = _Client(None)
@@ -444,7 +444,7 @@ def test__build_protobuf_all_values_except_offset(self):
         # this test and the following (all_values_except_start_and_end_cursor)
         # test mutually exclusive states; the offset is ignored
         # if a start_cursor is supplied
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
         from google.cloud.datastore.query import Query
 
         client = _Client(None)
@@ -463,14 +463,14 @@ def test__build_protobuf_all_values_except_offset(self):
 
         pb = iterator._build_protobuf()
         expected_pb = query_pb2.Query(start_cursor=start_bytes, end_cursor=end_bytes)
-        expected_pb.limit.value = limit - iterator.num_results
+        expected_pb._pb.limit.value = limit - iterator.num_results
         self.assertEqual(pb, expected_pb)
 
     def test__build_protobuf_all_values_except_start_and_end_cursor(self):
         # this test and the previous (all_values_except_start_offset)
         # test mutually exclusive states; the offset is ignored
         # if a start_cursor is supplied
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
         from google.cloud.datastore.query import Query
 
         client = _Client(None)
@@ -483,11 +483,11 @@ def test__build_protobuf_all_values_except_start_and_end_cursor(self):
 
         pb = iterator._build_protobuf()
         expected_pb = query_pb2.Query(offset=offset - iterator._skipped_results)
-        expected_pb.limit.value = limit - iterator.num_results
+        expected_pb._pb.limit.value = limit - iterator.num_results
         self.assertEqual(pb, expected_pb)
 
     def test__process_query_results(self):
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
         iterator = self._make_one(None, None, end_cursor="abcd")
         self.assertIsNotNone(iterator._end_cursor)
@@ -496,7 +496,7 @@ def test__process_query_results(self):
         cursor_as_bytes = b"\x9ai\xe7"
         cursor = b"mmnn"
         skipped_results = 4
-        more_results_enum = query_pb2.QueryResultBatch.NOT_FINISHED
+        more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
         response_pb = _make_query_response(
             entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
         )
@@ -508,7 +508,7 @@ def test__process_query_results(self):
         self.assertTrue(iterator._more_results)
 
     def test__process_query_results_done(self):
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
         iterator = self._make_one(None, None, end_cursor="abcd")
         self.assertIsNotNone(iterator._end_cursor)
@@ -516,7 +516,7 @@ def test__process_query_results_done(self):
         entity_pbs = [_make_entity("World", 1234, "PROJECT")]
         cursor_as_bytes = b"\x9ai\xe7"
         skipped_results = 44
-        more_results_enum = query_pb2.QueryResultBatch.NO_MORE_RESULTS
+        more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS
         response_pb = _make_query_response(
             entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
         )
@@ -536,12 +536,12 @@ def test__process_query_results_bad_enum(self):
 
     def _next_page_helper(self, txn_id=None, retry=None, timeout=None):
         from google.api_core import page_iterator
-        from google.cloud.datastore_v1.proto import datastore_pb2
-        from google.cloud.datastore_v1.proto import entity_pb2
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+        from google.cloud.datastore_v1.types import entity as entity_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
         from google.cloud.datastore.query import Query
 
-        more_enum = query_pb2.QueryResultBatch.NOT_FINISHED
+        more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
         result = _make_query_response([], b"", more_enum, 0)
         project = "prujekt"
         ds_api = _make_datastore_api(result)
@@ -574,7 +574,13 @@ def _next_page_helper(self, txn_id=None, retry=None, timeout=None):
         read_options = datastore_pb2.ReadOptions(transaction=txn_id)
         empty_query = query_pb2.Query()
         ds_api.run_query.assert_called_once_with(
-            project, partition_id, read_options, query=empty_query, **kwargs
+            request={
+                "project_id": project,
+                "partition_id": partition_id,
+                "read_options": read_options,
+                "query": empty_query,
+            },
+            **kwargs,
         )
 
     def test__next_page(self):
@@ -611,7 +617,8 @@ def _call_fut(self, iterator, entity_pb):
         return _item_to_entity(iterator, entity_pb)
 
     def test_it(self):
-        entity_pb = mock.sentinel.entity_pb
+        entity_pb = mock.Mock()
+        entity_pb._pb = mock.sentinel.entity_pb
         patch = mock.patch("google.cloud.datastore.helpers.entity_from_protobuf")
         with patch as entity_from_protobuf:
             result = self._call_fut(None, entity_pb)
@@ -627,7 +634,7 @@ def _call_fut(self, query):
         return _pb_from_query(query)
 
     def test_empty(self):
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
         pb = self._call_fut(_Query())
         self.assertEqual(list(pb.projection), [])
@@ -636,11 +643,13 @@ def test_empty(self):
         self.assertEqual(list(pb.distinct_on), [])
         self.assertEqual(pb.filter.property_filter.property.name, "")
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED)
+        self.assertEqual(
+            cfilter.op, query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED
+        )
         self.assertEqual(list(cfilter.filters), [])
         self.assertEqual(pb.start_cursor, b"")
         self.assertEqual(pb.end_cursor, b"")
-        self.assertEqual(pb.limit.value, 0)
+        self.assertEqual(pb._pb.limit.value, 0)
         self.assertEqual(pb.offset, 0)
 
     def test_projection(self):
@@ -655,12 +664,12 @@ def test_kind(self):
 
     def test_ancestor(self):
         from google.cloud.datastore.key import Key
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
         ancestor = Key("Ancestor", 123, project="PROJECT")
         pb = self._call_fut(_Query(ancestor=ancestor))
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
+        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND)
         self.assertEqual(len(cfilter.filters), 1)
         pfilter = cfilter.filters[0].property_filter
         self.assertEqual(pfilter.property.name, "__key__")
@@ -668,28 +677,28 @@ def test_ancestor(self):
         self.assertEqual(pfilter.value.key_value, ancestor_pb)
 
     def test_filter(self):
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
-        query = _Query(filters=[("name", "=", u"John")])
-        query.OPERATORS = {"=": query_pb2.PropertyFilter.EQUAL}
+        query = _Query(filters=[("name", "=", "John")])
+        query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL}
         pb = self._call_fut(query)
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
+        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND)
         self.assertEqual(len(cfilter.filters), 1)
         pfilter = cfilter.filters[0].property_filter
         self.assertEqual(pfilter.property.name, "name")
-        self.assertEqual(pfilter.value.string_value, u"John")
+        self.assertEqual(pfilter.value.string_value, "John")
 
     def test_filter_key(self):
         from google.cloud.datastore.key import Key
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
         key = Key("Kind", 123, project="PROJECT")
         query = _Query(filters=[("__key__", "=", key)])
-        query.OPERATORS = {"=": query_pb2.PropertyFilter.EQUAL}
+        query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL}
         pb = self._call_fut(query)
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
+        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND)
         self.assertEqual(len(cfilter.filters), 1)
         pfilter = cfilter.filters[0].property_filter
         self.assertEqual(pfilter.property.name, "__key__")
@@ -697,16 +706,16 @@ def test_filter_key(self):
         self.assertEqual(pfilter.value.key_value, key_pb)
 
     def test_order(self):
-        from google.cloud.datastore_v1.proto import query_pb2
+        from google.cloud.datastore_v1.types import query as query_pb2
 
         pb = self._call_fut(_Query(order=["a", "-b", "c"]))
         self.assertEqual([item.property.name for item in pb.order], ["a", "b", "c"])
         self.assertEqual(
             [item.direction for item in pb.order],
             [
-                query_pb2.PropertyOrder.ASCENDING,
-                query_pb2.PropertyOrder.DESCENDING,
-                query_pb2.PropertyOrder.ASCENDING,
+                query_pb2.PropertyOrder.Direction.ASCENDING,
+                query_pb2.PropertyOrder.Direction.DESCENDING,
+                query_pb2.PropertyOrder.Direction.ASCENDING,
             ],
         )
 
@@ -752,11 +761,11 @@ def current_transaction(self):
 
 
 def _make_entity(kind, id_, project):
-    from google.cloud.datastore_v1.proto import entity_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
 
     key = entity_pb2.Key()
     key.partition_id.project_id = project
-    elem = key.path.add()
+    elem = key.path._pb.add()
     elem.kind = kind
     elem.id = id_
     return entity_pb2.Entity(key=key)
 
@@ -765,8 +774,8 @@ def _make_entity(kind, id_, project):
 def _make_query_response(
     entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
 ):
-    from google.cloud.datastore_v1.proto import datastore_pb2
-    from google.cloud.datastore_v1.proto import query_pb2
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import query as query_pb2
 
     return datastore_pb2.RunQueryResponse(
         batch=query_pb2.QueryResultBatch(
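Note: the query hunks above also show the second recurring rewrite in this patch: enum values generated by the microgenerator are nested under a named enum type, so `CompositeFilter.AND` becomes `CompositeFilter.Operator.AND` and `PropertyOrder.ASCENDING` becomes `PropertyOrder.Direction.ASCENDING`. A rough stand-in sketch of that shape, using `enum.IntEnum` rather than the real generated classes:

    import enum

    # Illustrative stand-in for a generated proto-plus message class; the real
    # classes live in google.cloud.datastore_v1.types and nest enums this way.
    class CompositeFilter:
        class Operator(enum.IntEnum):
            OPERATOR_UNSPECIFIED = 0
            AND = 1

    # The old surface exposed values on the message class itself
    # (CompositeFilter.AND); the new surface nests them under the enum type.
    assert CompositeFilter.Operator.AND == 1
    assert CompositeFilter.Operator.OPERATOR_UNSPECIFIED == 0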
diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py
index b285db1f5058..1bc355cc7339 100644
--- a/packages/google-cloud-datastore/tests/unit/test_transaction.py
+++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py
@@ -47,7 +47,7 @@ def test_ctor_defaults(self):
         self.assertEqual(len(xact._partial_key_entities), 0)
 
     def test_current(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         project = "PROJECT"
         id_ = 678
@@ -77,12 +77,19 @@ def test_current(self):
         ds_api.rollback.assert_not_called()
         commit_method = ds_api.commit
         self.assertEqual(commit_method.call_count, 2)
-        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
-        commit_method.assert_called_with(project, mode, [], transaction=id_)
+        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
+        commit_method.assert_called_with(
+            request={
+                "project_id": project,
+                "mode": mode,
+                "mutations": [],
+                "transaction": id_,
+            }
+        )
 
         begin_txn = ds_api.begin_transaction
         self.assertEqual(begin_txn.call_count, 2)
-        begin_txn.assert_called_with(project)
+        begin_txn.assert_called_with(request={"project_id": project})
 
     def test_begin(self):
         project = "PROJECT"
@@ -92,7 +99,9 @@ def test_begin(self):
         xact = self._make_one(client)
         xact.begin()
         self.assertEqual(xact.id, id_)
-        ds_api.begin_transaction.assert_called_once_with(project)
+        ds_api.begin_transaction.assert_called_once_with(
+            request={"project_id": project}
+        )
 
     def test_begin_w_retry_w_timeout(self):
         project = "PROJECT"
@@ -108,7 +117,7 @@ def test_begin_w_retry_w_timeout(self):
         self.assertEqual(xact.id, id_)
 
         ds_api.begin_transaction.assert_called_once_with(
-            project, retry=retry, timeout=timeout
+            request={"project_id": project}, retry=retry, timeout=timeout
         )
 
     def test_begin_tombstoned(self):
@@ -119,10 +128,14 @@ def test_begin_tombstoned(self):
         xact = self._make_one(client)
         xact.begin()
         self.assertEqual(xact.id, id_)
-        ds_api.begin_transaction.assert_called_once_with(project)
+        ds_api.begin_transaction.assert_called_once_with(
+            request={"project_id": project}
+        )
 
         xact.rollback()
-        client._datastore_api.rollback.assert_called_once_with(project, id_)
+        client._datastore_api.rollback.assert_called_once_with(
+            request={"project_id": project, "transaction": id_}
+        )
         self.assertIsNone(xact.id)
 
         self.assertRaises(ValueError, xact.begin)
@@ -139,7 +152,9 @@ def test_begin_w_begin_transaction_failure(self):
             xact.begin()
 
         self.assertIsNone(xact.id)
-        ds_api.begin_transaction.assert_called_once_with(project)
+        ds_api.begin_transaction.assert_called_once_with(
+            request={"project_id": project}
+        )
 
     def test_rollback(self):
         project = "PROJECT"
@@ -152,7 +167,9 @@ def test_rollback(self):
         xact.rollback()
         self.assertIsNone(xact.id)
-        ds_api.rollback.assert_called_once_with(project, id_)
+        ds_api.rollback.assert_called_once_with(
+            request={"project_id": project, "transaction": id_}
+        )
 
     def test_rollback_w_retry_w_timeout(self):
         project = "PROJECT"
@@ -169,15 +186,17 @@ def test_rollback_w_retry_w_timeout(self):
         self.assertIsNone(xact.id)
 
         ds_api.rollback.assert_called_once_with(
-            project, id_, retry=retry, timeout=timeout
+            request={"project_id": project, "transaction": id_},
+            retry=retry,
+            timeout=timeout,
         )
 
     def test_commit_no_partial_keys(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         project = "PROJECT"
         id_ = 1002930
-        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
 
         ds_api = _make_datastore_api(xact_id=id_)
         client = _Client(project, datastore_api=ds_api)
@@ -185,16 +204,23 @@ def test_commit_no_partial_keys(self):
         xact.begin()
         xact.commit()
 
-        ds_api.commit.assert_called_once_with(project, mode, [], transaction=id_)
+        ds_api.commit.assert_called_once_with(
+            request={
+                "project_id": project,
+                "mode": mode,
+                "mutations": [],
+                "transaction": id_,
+            }
+        )
         self.assertIsNone(xact.id)
 
     def test_commit_w_partial_keys_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         project = "PROJECT"
         kind = "KIND"
         id1 = 123
-        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
         key = _make_key(kind, id1, project)
         id2 = 234
         retry = mock.Mock()
@@ -210,10 +236,12 @@ def test_commit_w_partial_keys_w_retry_w_timeout(self):
         xact.commit(retry=retry, timeout=timeout)
 
         ds_api.commit.assert_called_once_with(
-            project,
-            mode,
-            xact.mutations,
-            transaction=id2,
+            request={
+                "project_id": project,
+                "mode": mode,
+                "mutations": xact.mutations,
+                "transaction": id2,
+            },
             retry=retry,
             timeout=timeout,
         )
@@ -221,7 +249,7 @@ def test_commit_w_partial_keys_w_retry_w_timeout(self):
         self.assertEqual(entity.key.path, [{"kind": kind, "id": id1}])
 
     def test_context_manager_no_raise(self):
-        from google.cloud.datastore_v1.proto import datastore_pb2
+        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
         project = "PROJECT"
         id_ = 912830
@@ -230,12 +258,20 @@ def test_context_manager_no_raise(self):
         xact = self._make_one(client)
         with xact:
             self.assertEqual(xact.id, id_)
-            ds_api.begin_transaction.assert_called_once_with(project)
+            ds_api.begin_transaction.assert_called_once_with(
+                request={"project_id": project}
+            )
 
-        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
         client._datastore_api.commit.assert_called_once_with(
-            project, mode, [], transaction=id_
+            request={
+                "project_id": project,
+                "mode": mode,
+                "mutations": [],
+                "transaction": id_,
+            },
         )
+
         self.assertIsNone(xact.id)
         self.assertEqual(ds_api.begin_transaction.call_count, 1)
@@ -252,11 +288,15 @@ class Foo(Exception):
         try:
             with xact:
                 self.assertEqual(xact.id, id_)
-                ds_api.begin_transaction.assert_called_once_with(project)
+                ds_api.begin_transaction.assert_called_once_with(
+                    request={"project_id": project}
+                )
                 raise Foo()
         except Foo:
             self.assertIsNone(xact.id)
-            client._datastore_api.rollback.assert_called_once_with(project, id_)
+            client._datastore_api.rollback.assert_called_once_with(
+                request={"project_id": project, "transaction": id_}
+            )
 
         client._datastore_api.commit.assert_not_called()
         self.assertIsNone(xact.id)
@@ -286,11 +326,11 @@ def test_put_read_only(self):
 
 
 def _make_key(kind, id_, project):
-    from google.cloud.datastore_v1.proto import entity_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
 
     key = entity_pb2.Key()
     key.partition_id.project_id = project
-    elem = key.path.add()
+    elem = key._pb.path.add()
     elem.kind = kind
     elem.id = id_
     return key
@@ -340,9 +380,9 @@ def __exit__(self, *args):
 
 
 def _make_commit_response(*keys):
-    from google.cloud.datastore_v1.proto import datastore_pb2
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-    mutation_results = [datastore_pb2.MutationResult(key=key) for key in keys]
+    mutation_results = [datastore_pb2.MutationResult(key=key)._pb for key in keys]
     return datastore_pb2.CommitResponse(mutation_results=mutation_results)
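Note: the `._pb` attribute reached for throughout this commit is the proto-plus escape hatch to the underlying raw protobuf message, which the tests need for protobuf-only APIs such as `HasField`, `WhichOneof`, and `add()` on repeated message fields. A short sketch of the pattern, under the assumption that `google-cloud-datastore` >= 2.0 is installed:

    from google.cloud.datastore_v1.types import entity as entity_pb2

    key = entity_pb2.Key()
    key.partition_id.project_id = "PROJECT"

    # The proto-plus wrapper exposes no add() on repeated message fields;
    # the raw protobuf message underneath does.
    elem = key._pb.path.add()
    elem.kind = "Kind"
    elem.id = 1234

    # Raw-protobuf-only introspection likewise goes through ._pb.
    assert key._pb.HasField("partition_id")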
datastore_pb2.CommitResponse(mutation_results=mutation_results) From c8b1e8a2eef76540ea8214e86e86382a1db9d226 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 30 Oct 2020 15:46:29 -0700 Subject: [PATCH 300/611] chore: regenerate from new generator configuration (#108) * chore: regenerate from generator * chore(docs): update docstrings to not have separator which python docs builds interprets as first-party markup * test: update noxfile to test current supported versions * chore: update synth.py * test(cov): drop coverage until we address generated test coverage drop --- .../.kokoro/docs/common.cfg | 2 +- .../.kokoro/test-samples.sh | 8 +- .../google-cloud-datastore/CODE_OF_CONDUCT.md | 123 +++++--- packages/google-cloud-datastore/docs/conf.py | 1 + .../services/datastore_admin/async_client.py | 64 +++- .../services/datastore_admin/client.py | 111 +++++-- .../datastore_admin/transports/base.py | 4 +- .../datastore_admin/transports/grpc.py | 18 +- .../transports/grpc_asyncio.py | 4 + .../types/datastore_admin.py | 16 +- .../services/datastore/async_client.py | 70 ++++- .../datastore_v1/services/datastore/client.py | 114 +++++-- .../services/datastore/transports/grpc.py | 18 +- .../datastore/transports/grpc_asyncio.py | 4 + .../google/cloud/datastore_v1/types/entity.py | 10 +- .../google/cloud/datastore_v1/types/query.py | 14 +- packages/google-cloud-datastore/noxfile.py | 6 +- .../fixup_datastore_admin_v1_keywords.py | 1 + .../scripts/fixup_datastore_v1_keywords.py | 1 + .../google-cloud-datastore/synth.metadata | 12 +- packages/google-cloud-datastore/synth.py | 40 +-- .../test_datastore_admin.py | 226 ++++++++++---- .../unit/gapic/datastore_v1/test_datastore.py | 282 +++++++++++++----- 23 files changed, 848 insertions(+), 301 deletions(-) diff --git a/packages/google-cloud-datastore/.kokoro/docs/common.cfg b/packages/google-cloud-datastore/.kokoro/docs/common.cfg index 895e96a69feb..cc2ce85aa8c8 100644 --- a/packages/google-cloud-datastore/.kokoro/docs/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh index 78494e1e5490..c4163d795804 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then git checkout $LATEST_RELEASE fi +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 @@ -101,4 +107,4 @@ cd "$ROOT" # Workaround for Kokoro permissions issue: delete secrets rm testing/{test-env.sh,client-secrets.json,service-account.json} -exit "$RTN" \ No newline at end of file +exit "$RTN" diff --git a/packages/google-cloud-datastore/CODE_OF_CONDUCT.md b/packages/google-cloud-datastore/CODE_OF_CONDUCT.md index b3d1f6029849..039f43681204 100644 --- a/packages/google-cloud-datastore/CODE_OF_CONDUCT.md +++ b/packages/google-cloud-datastore/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. 
- -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. 
+ +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 72346ac9fe04..2aff2fc7a9d1 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -349,6 +349,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index fd9589b6d468..0cd7d99e2d85 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -28,8 +28,8 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index @@ -106,9 +106,47 @@ class DatastoreAdminAsyncClient: DEFAULT_ENDPOINT = DatastoreAdminClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + common_billing_account_path = staticmethod( + DatastoreAdminClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DatastoreAdminClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(DatastoreAdminClient.common_folder_path) + parse_common_folder_path = staticmethod( + DatastoreAdminClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + DatastoreAdminClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DatastoreAdminClient.parse_common_organization_path + ) + + common_project_path = staticmethod(DatastoreAdminClient.common_project_path) + parse_common_project_path = staticmethod( + DatastoreAdminClient.parse_common_project_path + ) + + common_location_path = staticmethod(DatastoreAdminClient.common_location_path) + parse_common_location_path = staticmethod( + DatastoreAdminClient.parse_common_location_path + ) + from_service_account_file = DatastoreAdminClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> DatastoreAdminTransport: + """Return the transport used by the client instance. + + Returns: + DatastoreAdminTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(DatastoreAdminClient).get_transport_class, type(DatastoreAdminClient) ) @@ -253,9 +291,10 @@ async def export_entities( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any( + has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] - ): + ) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -268,13 +307,14 @@ async def export_entities( if project_id is not None: request.project_id = project_id - if labels is not None: - request.labels = labels if entity_filter is not None: request.entity_filter = entity_filter if output_url_prefix is not None: request.output_url_prefix = output_url_prefix + if labels: + request.labels.update(labels) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( @@ -392,7 +432,8 @@ async def import_entities( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, labels, input_url, entity_filter]): + has_flattened_params = any([project_id, labels, input_url, entity_filter]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -405,13 +446,14 @@ async def import_entities( if project_id is not None: request.project_id = project_id - if labels is not None: - request.labels = labels if input_url is not None: request.input_url = input_url if entity_filter is not None: request.entity_filter = entity_filter + if labels: + request.labels.update(labels) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( @@ -472,7 +514,7 @@ async def get_index( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -531,7 +573,7 @@ async def list_indexes( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 0ebed21e0971..a97567590ce6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -13,15 +13,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# from collections import OrderedDict from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -31,8 +32,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index @@ -80,7 +81,6 @@ class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta): """Google Cloud Datastore Admin API The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- ## Concepts Project, namespace, kind, and entity as defined in the Google @@ -91,7 +91,6 @@ class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta): EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). - ## Services # Export/Import @@ -190,12 +189,80 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> DatastoreAdminTransport: + """Return the transport used by the client instance. + + Returns: + DatastoreAdminTransport: The transport used by the client instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, DatastoreAdminTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, DatastoreAdminTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the datastore admin client. @@ -209,8 +276,8 @@ def __init__( transport (Union[str, ~.DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -225,10 +292,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -236,9 +303,9 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( @@ -420,13 +487,14 @@ def export_entities( if project_id is not None: request.project_id = project_id - if labels is not None: - request.labels = labels if entity_filter is not None: request.entity_filter = entity_filter if output_url_prefix is not None: request.output_url_prefix = output_url_prefix + if labels: + request.labels.update(labels) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_entities] @@ -559,13 +627,14 @@ def import_entities( if project_id is not None: request.project_id = project_id - if labels is not None: - request.labels = labels if input_url is not None: request.input_url = input_url if entity_filter is not None: request.entity_filter = entity_filter + if labels: + request.labels.update(labels) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.import_entities] diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 6049d54623fd..d2a8b621863b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -125,7 +125,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -138,7 +138,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index b478b75ad331..498a6a53abdf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -149,10 +149,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -161,6 +161,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -168,6 +170,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -204,6 +207,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -281,12 +285,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. 
return self._grpc_channel @property diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index f80c7da93006..f731d4c0ca24 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -206,6 +206,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -213,6 +215,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -249,6 +252,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 8f60bfe16b0f..1fd3c8d58ff3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -240,11 +240,11 @@ class ExportEntitiesMetadata(proto.Message): [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. """ - common = proto.Field(proto.MESSAGE, number=1, message=CommonMetadata,) + common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message=Progress,) + progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message=Progress,) + progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) @@ -273,11 +273,11 @@ class ImportEntitiesMetadata(proto.Message): field. """ - common = proto.Field(proto.MESSAGE, number=1, message=CommonMetadata,) + common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message=Progress,) + progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message=Progress,) + progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) @@ -398,9 +398,9 @@ class IndexOperationMetadata(proto.Message): acting on. 
""" - common = proto.Field(proto.MESSAGE, number=1, message=CommonMetadata,) + common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message=Progress,) + progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) index_id = proto.Field(proto.STRING, number=3) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index cc1760e0cca5..01a2cbee3050 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -52,9 +52,41 @@ class DatastoreAsyncClient: DEFAULT_ENDPOINT = DatastoreClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DatastoreClient.DEFAULT_MTLS_ENDPOINT + common_billing_account_path = staticmethod( + DatastoreClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DatastoreClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(DatastoreClient.common_folder_path) + parse_common_folder_path = staticmethod(DatastoreClient.parse_common_folder_path) + + common_organization_path = staticmethod(DatastoreClient.common_organization_path) + parse_common_organization_path = staticmethod( + DatastoreClient.parse_common_organization_path + ) + + common_project_path = staticmethod(DatastoreClient.common_project_path) + parse_common_project_path = staticmethod(DatastoreClient.parse_common_project_path) + + common_location_path = staticmethod(DatastoreClient.common_location_path) + parse_common_location_path = staticmethod( + DatastoreClient.parse_common_location_path + ) + from_service_account_file = DatastoreClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> DatastoreTransport: + """Return the transport used by the client instance. + + Returns: + DatastoreTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(DatastoreClient).get_transport_class, type(DatastoreClient) ) @@ -157,7 +189,8 @@ async def lookup( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, read_options, keys]): + has_flattened_params = any([project_id, read_options, keys]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -172,8 +205,9 @@ async def lookup( request.project_id = project_id if read_options is not None: request.read_options = read_options - if keys is not None: - request.keys = keys + + if keys: + request.keys.extend(keys) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -287,7 +321,8 @@ async def begin_transaction( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([project_id]): + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -388,7 +423,8 @@ async def commit( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, mode, transaction, mutations]): + has_flattened_params = any([project_id, mode, transaction, mutations]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -405,8 +441,9 @@ async def commit( request.mode = mode if transaction is not None: request.transaction = transaction - if mutations is not None: - request.mutations = mutations + + if mutations: + request.mutations.extend(mutations) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -468,7 +505,8 @@ async def rollback( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, transaction]): + has_flattened_params = any([project_id, transaction]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -545,7 +583,8 @@ async def allocate_ids( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, keys]): + has_flattened_params = any([project_id, keys]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -558,8 +597,9 @@ async def allocate_ids( if project_id is not None: request.project_id = project_id - if keys is not None: - request.keys = keys + + if keys: + request.keys.extend(keys) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -621,7 +661,8 @@ async def reserve_ids( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, keys]): + has_flattened_params = any([project_id, keys]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -634,8 +675,9 @@ async def reserve_ids( if project_id is not None: request.project_id = project_id - if keys is not None: - request.keys = keys + + if keys: + request.keys.extend(keys) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
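The hunks above all apply the same microgenerator convention: the old flattened positional arguments collapse into a single `request` argument, and repeated fields such as `keys` and `mutations` are appended with `extend()` rather than assigned. A minimal sketch of what the regenerated surface looks like to a caller, assuming application-default credentials are available; the project ID and kind below are placeholder values, not anything defined in this patch:

    from google.cloud.datastore_v1 import DatastoreClient
    from google.cloud.datastore_v1.types import entity as entity_pb2

    # Assumes application-default credentials are available in the environment.
    client = DatastoreClient()

    # An incomplete key; "demo-project" and "Task" are hypothetical values.
    incomplete_key = entity_pb2.Key(
        partition_id=entity_pb2.PartitionId(project_id="demo-project"),
        path=[entity_pb2.Key.PathElement(kind="Task")],
    )

    # Old surface: client.allocate_ids("demo-project", [incomplete_key])
    # New surface: every field travels in one request mapping (or proto message).
    response = client.allocate_ids(
        request={"project_id": "demo-project", "keys": [incomplete_key]}
    )
    print(response.keys[0].path[0].id)  # server-assigned numeric ID
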
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 5271a96a7874..e13791581aa4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -19,10 +19,10 @@ from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -136,12 +136,80 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + + @property + def transport(self) -> DatastoreTransport: + """Return the transport used by the client instance. + + Returns: + DatastoreTransport: The transport used by the client instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, -
transport: Union[str, DatastoreTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, DatastoreTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the datastore client. @@ -155,8 +223,8 @@ def __init__( transport (Union[str, ~.DatastoreTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -171,10 +239,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -182,9 +250,9 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( @@ -324,8 +392,9 @@ def lookup( request.project_id = project_id if read_options is not None: request.read_options = read_options - if keys is not None: - request.keys = keys + + if keys: + request.keys.extend(keys) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -546,8 +615,9 @@ def commit( request.mode = mode if transaction is not None: request.transaction = transaction - if mutations is not None: - request.mutations = mutations + + if mutations: + request.mutations.extend(mutations) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -703,8 +773,9 @@ def allocate_ids( if project_id is not None: request.project_id = project_id - if keys is not None: - request.keys = keys + + if keys: + request.keys.extend(keys) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -781,8 +852,9 @@ def reserve_ids( if project_id is not None: request.project_id = project_id - if keys is not None: - request.keys = keys + + if keys: + request.keys.extend(keys) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
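The `common_*_path` and `parse_common_*_path` helpers added to both clients are pure string utilities, so their round-trip behavior can be checked without credentials or network access. A short sketch under that assumption (the project and location values are placeholders):

    from google.cloud.datastore_v1 import DatastoreClient

    # Compose a fully-qualified resource name from its segments.
    path = DatastoreClient.common_location_path("demo-project", "us-east1")
    assert path == "projects/demo-project/locations/us-east1"

    # Parse it back into its component segments; non-matching input yields {}.
    segments = DatastoreClient.parse_common_location_path(path)
    assert segments == {"project": "demo-project", "location": "us-east1"}
    assert DatastoreClient.parse_common_location_path("not-a-path") == {}
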
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index f8f18768f380..7d170570bb82 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -95,10 +95,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -107,6 +107,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -114,6 +116,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -150,6 +153,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -227,12 +231,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. return self._grpc_channel @property diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index a9c5611f8c28..8ba5f66dbaa9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -152,6 +152,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -159,6 +161,7 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -195,6 +198,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 96d4a6f49ddb..cc1be6e2aab1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -126,7 +126,7 @@ class PathElement(proto.Message): name = proto.Field(proto.STRING, number=3, oneof="id_type") - partition_id = proto.Field(proto.MESSAGE, number=1, message=PartitionId,) + partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) @@ -206,7 +206,7 @@ class Value(proto.Message): proto.MESSAGE, number=10, oneof="value_type", message=timestamp.Timestamp, ) - key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message=Key,) + key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message="Key",) string_value = proto.Field(proto.STRING, number=17, oneof="value_type") @@ -221,7 +221,7 @@ class Value(proto.Message): ) array_value = proto.Field( - proto.MESSAGE, number=9, oneof="value_type", message=ArrayValue, + proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", ) meaning = proto.Field(proto.INT32, number=14) @@ -252,9 +252,9 @@ class Entity(proto.Message): characters. The name cannot be ``""``. """ - key = proto.Field(proto.MESSAGE, number=1, message=Key,) + key = proto.Field(proto.MESSAGE, number=1, message="Key",) - properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message=Value,) + properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="Value",) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 87ed724d1b34..173626b06f2f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -180,7 +180,7 @@ class Projection(proto.Message): The property to project. 
""" - property = proto.Field(proto.MESSAGE, number=1, message=PropertyReference,) + property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) class PropertyOrder(proto.Message): @@ -199,7 +199,7 @@ class Direction(proto.Enum): ASCENDING = 1 DESCENDING = 2 - property = proto.Field(proto.MESSAGE, number=1, message=PropertyReference,) + property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) direction = proto.Field(proto.ENUM, number=2, enum=Direction,) @@ -242,7 +242,7 @@ class Operator(proto.Enum): op = proto.Field(proto.ENUM, number=1, enum=Operator,) - filters = proto.RepeatedField(proto.MESSAGE, number=2, message=Filter,) + filters = proto.RepeatedField(proto.MESSAGE, number=2, message="Filter",) class PropertyFilter(proto.Message): @@ -267,7 +267,7 @@ class Operator(proto.Enum): EQUAL = 5 HAS_ANCESTOR = 11 - property = proto.Field(proto.MESSAGE, number=1, message=PropertyReference,) + property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) op = proto.Field(proto.ENUM, number=2, enum=Operator,) @@ -382,10 +382,12 @@ class MoreResultsType(proto.Enum): skipped_cursor = proto.Field(proto.BYTES, number=3) entity_result_type = proto.Field( - proto.ENUM, number=1, enum=EntityResult.ResultType, + proto.ENUM, number=1, enum="EntityResult.ResultType", ) - entity_results = proto.RepeatedField(proto.MESSAGE, number=2, message=EntityResult,) + entity_results = proto.RepeatedField( + proto.MESSAGE, number=2, message="EntityResult", + ) end_cursor = proto.Field(proto.BYTES, number=4) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index acdde3e6db53..21cdf161e79a 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -70,7 +70,9 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-asyncio", "pytest-cov") + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. @@ -139,7 +141,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=97") session.run("coverage", "erase") diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py index 12009ba0eccb..fae3ea91605f 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 30d643d2d678..8b04f6fe13ba 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -1,3 +1,4 @@ +#! 
/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index 279f34898d70..b9711974f3f4 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -4,14 +4,22 @@ "git": { "name": ".", "remote": "git@github.com:crwilcox/python-datastore.git", - "sha": "10442b5a6aec7a352612f02368bf1257ddbfc855" + "sha": "4a8ba01b9802d1284915f18fbd974aca8fef4b48" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "86e167e6be2c4300066fd8ff5db139c6b846d049", + "internalRef": "339897204" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "487eba79f8260e34205d8ceb1ebcc65685085e19" + "sha": "6542bd723403513626f61642fc02ddca528409aa" } } ], diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 251fcc865b14..5aa94d2505b7 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -67,43 +67,21 @@ # Remove spurious markup s.replace( - "google/**/datastore_admin_client.py", - "-----------------------------------------------------------------------------", + "google/**/datastore_admin/client.py", + "\s+---------------------------------(-)+", "" ) -# TODO(busunkim): Remove during the microgenerator transition. -# This re-orders the parameters to avoid breaking existing code. -num = s.replace( -"google/**/datastore_client.py", -"""def commit\( -\s+self, -\s+project_id, -\s+mode=None, -\s+transaction=None, -\s+mutations=None, -\s+retry=google\.api_core\.gapic_v1\.method\.DEFAULT, -\s+timeout=google\.api_core\.gapic_v1\.method\.DEFAULT, -\s+metadata=None\):""", -"""def commit( - self, - project_id, - mode=None, - mutations=None, - transaction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ):""" -) - -#if num != 1: -# raise Exception("Required replacement not made.") - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=99) +# TODO: cov_level should be 99%, reduced due to regression in test coverage. 
+templated_files = common.py_library( + unit_cov_level=97, + cov_level=97, + unit_test_python_versions=["3.6", "3.7", "3.8"], + system_test_python_versions=["3.8"], +) s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 1efc1dcae7ca..54320c9774f1 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -31,7 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation_async +from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError @@ -103,12 +103,12 @@ def test_datastore_admin_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "datastore.googleapis.com:443" + assert client.transport._host == "datastore.googleapis.com:443" def test_datastore_admin_client_get_transport_class(): @@ -458,7 +458,7 @@ def test_export_entities( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_entities), "__call__") as call: + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -479,19 +479,19 @@ def test_export_entities_from_dict(): @pytest.mark.asyncio -async def test_export_entities_async(transport: str = "grpc_asyncio"): +async def test_export_entities_async( + transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest +): client = DatastoreAdminAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore_admin.ExportEntitiesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_entities), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -503,17 +503,22 @@ async def test_export_entities_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore_admin.ExportEntitiesRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_export_entities_async_from_dict(): + await test_export_entities_async(request_type=dict) + + def test_export_entities_flattened(): client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.export_entities), "__call__") as call: + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -562,9 +567,7 @@ async def test_export_entities_flattened_async(): client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.export_entities), "__call__" - ) as call: + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -624,7 +627,7 @@ def test_import_entities( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_entities), "__call__") as call: + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -645,19 +648,19 @@ def test_import_entities_from_dict(): @pytest.mark.asyncio -async def test_import_entities_async(transport: str = "grpc_asyncio"): +async def test_import_entities_async( + transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest +): client = DatastoreAdminAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore_admin.ImportEntitiesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_entities), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -669,17 +672,22 @@ async def test_import_entities_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore_admin.ImportEntitiesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_import_entities_async_from_dict(): + await test_import_entities_async(request_type=dict) + + def test_import_entities_flattened(): client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.import_entities), "__call__") as call: + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -728,9 +736,7 @@ async def test_import_entities_flattened_async(): client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.import_entities), "__call__" - ) as call: + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -790,7 +796,7 @@ def test_get_index( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_index), "__call__") as call: + with mock.patch.object(type(client.transport.get_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = index.Index( project_id="project_id_value", @@ -809,6 +815,7 @@ def test_get_index( assert args[0] == datastore_admin.GetIndexRequest() # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) assert response.project_id == "project_id_value" @@ -827,19 +834,19 @@ def test_get_index_from_dict(): @pytest.mark.asyncio -async def test_get_index_async(transport: str = "grpc_asyncio"): +async def test_get_index_async( + transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest +): client = DatastoreAdminAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore_admin.GetIndexRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_index), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( index.Index( @@ -857,7 +864,7 @@ async def test_get_index_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore_admin.GetIndexRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, index.Index) @@ -873,6 +880,11 @@ async def test_get_index_async(transport: str = "grpc_asyncio"): assert response.state == index.Index.State.CREATING +@pytest.mark.asyncio +async def test_get_index_async_from_dict(): + await test_get_index_async(request_type=dict) + + def test_list_indexes( transport: str = "grpc", request_type=datastore_admin.ListIndexesRequest ): @@ -885,7 +897,7 @@ def test_list_indexes( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore_admin.ListIndexesResponse( next_page_token="next_page_token_value", @@ -900,6 +912,7 @@ def test_list_indexes( assert args[0] == datastore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIndexesPager) assert response.next_page_token == "next_page_token_value" @@ -910,19 +923,19 @@ def test_list_indexes_from_dict(): @pytest.mark.asyncio -async def test_list_indexes_async(transport: str = "grpc_asyncio"): +async def test_list_indexes_async( + transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest +): client = DatastoreAdminAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore_admin.ListIndexesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_indexes), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore_admin.ListIndexesResponse( @@ -936,7 +949,7 @@ async def test_list_indexes_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesAsyncPager) @@ -944,11 +957,16 @@ async def test_list_indexes_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_indexes_async_from_dict(): + await test_list_indexes_async(request_type=dict) + + def test_list_indexes_pager(): client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( @@ -979,7 +997,7 @@ def test_list_indexes_pages(): client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( @@ -1006,9 +1024,7 @@ async def test_list_indexes_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_indexes), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_indexes), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1041,9 +1057,7 @@ async def test_list_indexes_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_indexes), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_indexes), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1103,7 +1117,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = DatastoreAdminClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -1139,7 +1153,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.DatastoreAdminGrpcTransport,) + assert isinstance(client.transport, transports.DatastoreAdminGrpcTransport,) def test_datastore_admin_base_transport_error(): @@ -1250,7 +1264,7 @@ def test_datastore_admin_host_no_port(): api_endpoint="datastore.googleapis.com" ), ) - assert client._transport._host == "datastore.googleapis.com:443" + assert client.transport._host == "datastore.googleapis.com:443" def test_datastore_admin_host_with_port(): @@ -1260,7 +1274,7 @@ def test_datastore_admin_host_with_port(): api_endpoint="datastore.googleapis.com:8000" ), ) - assert client._transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == "datastore.googleapis.com:8000" def test_datastore_admin_grpc_transport_channel(): @@ -1272,6 +1286,7 @@ def test_datastore_admin_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_datastore_admin_grpc_asyncio_transport_channel(): @@ -1283,6 +1298,7 @@ def test_datastore_admin_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -1333,6 +1349,7 @@ def test_datastore_admin_transport_channel_mtls_with_client_cert_source( quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -1382,7 +1399,7 @@ def test_datastore_admin_grpc_lro_client(): client = DatastoreAdminClient( credentials=credentials.AnonymousCredentials(), transport="grpc", ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. 
assert isinstance(transport.operations_client, operations_v1.OperationsClient,) @@ -1395,7 +1412,7 @@ def test_datastore_admin_grpc_lro_async_client(): client = DatastoreAdminAsyncClient( credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) - transport = client._client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) @@ -1404,6 +1421,107 @@ def test_datastore_admin_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_common_billing_account_path(): + billing_account = "squid" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DatastoreAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DatastoreAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + + expected = "folders/{folder}".format(folder=folder,) + actual = DatastoreAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DatastoreAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + + expected = "organizations/{organization}".format(organization=organization,) + actual = DatastoreAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DatastoreAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + + expected = "projects/{project}".format(project=project,) + actual = DatastoreAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DatastoreAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = DatastoreAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DatastoreAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatastoreAdminClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index e5201c3ad90f..32faab361137 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -92,12 +92,12 @@ def test_datastore_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "datastore.googleapis.com:443" + assert client.transport._host == "datastore.googleapis.com:443" def test_datastore_client_get_transport_class(): @@ -437,7 +437,7 @@ def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.lookup), "__call__") as call: + with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.LookupResponse() @@ -450,6 +450,7 @@ def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): assert args[0] == datastore.LookupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datastore.LookupResponse) @@ -458,17 +459,19 @@ def test_lookup_from_dict(): @pytest.mark.asyncio -async def test_lookup_async(transport: str = "grpc_asyncio"): +async def test_lookup_async( + transport: str = "grpc_asyncio", request_type=datastore.LookupRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore.LookupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.lookup), "__call__") as call: + with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.LookupResponse() @@ -480,17 +483,22 @@ async def test_lookup_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.LookupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) +@pytest.mark.asyncio +async def test_lookup_async_from_dict(): + await test_lookup_async(request_type=dict) + + def test_lookup_flattened(): client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.lookup), "__call__") as call: + with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.LookupResponse() @@ -549,7 +557,7 @@ async def test_lookup_flattened_async(): client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.lookup), "__call__") as call: + with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.LookupResponse() @@ -617,7 +625,7 @@ def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryReque request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.run_query), "__call__") as call: + with mock.patch.object(type(client.transport.run_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RunQueryResponse() @@ -630,6 +638,7 @@ def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryReque assert args[0] == datastore.RunQueryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunQueryResponse) @@ -638,19 +647,19 @@ def test_run_query_from_dict(): @pytest.mark.asyncio -async def test_run_query_async(transport: str = "grpc_asyncio"): +async def test_run_query_async( + transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore.RunQueryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.run_query), "__call__" - ) as call: + with mock.patch.object(type(client.transport.run_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.RunQueryResponse() @@ -662,12 +671,17 @@ async def test_run_query_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.RunQueryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) +@pytest.mark.asyncio +async def test_run_query_async_from_dict(): + await test_run_query_async(request_type=dict) + + def test_begin_transaction( transport: str = "grpc", request_type=datastore.BeginTransactionRequest ): @@ -681,7 +695,7 @@ def test_begin_transaction( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.begin_transaction), "__call__" + type(client.transport.begin_transaction), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datastore.BeginTransactionResponse( @@ -697,6 +711,7 @@ def test_begin_transaction( assert args[0] == datastore.BeginTransactionRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastore.BeginTransactionResponse) assert response.transaction == b"transaction_blob" @@ -707,18 +722,20 @@ def test_begin_transaction_from_dict(): @pytest.mark.asyncio -async def test_begin_transaction_async(transport: str = "grpc_asyncio"): +async def test_begin_transaction_async( + transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore.BeginTransactionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.begin_transaction), "__call__" + type(client.transport.begin_transaction), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -731,7 +748,7 @@ async def test_begin_transaction_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.BeginTransactionRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.BeginTransactionResponse) @@ -739,12 +756,17 @@ async def test_begin_transaction_async(transport: str = "grpc_asyncio"): assert response.transaction == b"transaction_blob" +@pytest.mark.asyncio +async def test_begin_transaction_async_from_dict(): + await test_begin_transaction_async(request_type=dict) + + def test_begin_transaction_flattened(): client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.begin_transaction), "__call__" + type(client.transport.begin_transaction), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datastore.BeginTransactionResponse() @@ -778,7 +800,7 @@ async def test_begin_transaction_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.begin_transaction), "__call__" + type(client.transport.begin_transaction), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datastore.BeginTransactionResponse() @@ -820,7 +842,7 @@ def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.commit), "__call__") as call: + with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.CommitResponse(index_updates=1389,) @@ -833,6 +855,7 @@ def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): assert args[0] == datastore.CommitRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastore.CommitResponse) assert response.index_updates == 1389 @@ -843,17 +866,19 @@ def test_commit_from_dict(): @pytest.mark.asyncio -async def test_commit_async(transport: str = "grpc_asyncio"): +async def test_commit_async( + transport: str = "grpc_asyncio", request_type=datastore.CommitRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore.CommitRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.commit), "__call__") as call: + with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.CommitResponse(index_updates=1389,) @@ -865,7 +890,7 @@ async def test_commit_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.CommitRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.CommitResponse) @@ -873,11 +898,16 @@ async def test_commit_async(transport: str = "grpc_asyncio"): assert response.index_updates == 1389 +@pytest.mark.asyncio +async def test_commit_async_from_dict(): + await test_commit_async(request_type=dict) + + def test_commit_flattened(): client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.commit), "__call__") as call: + with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.CommitResponse() @@ -952,7 +982,7 @@ async def test_commit_flattened_async(): client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.commit), "__call__") as call: + with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.CommitResponse() @@ -1036,7 +1066,7 @@ def test_rollback(transport: str = "grpc", request_type=datastore.RollbackReques request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.rollback), "__call__") as call: + with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RollbackResponse() @@ -1049,6 +1079,7 @@ def test_rollback(transport: str = "grpc", request_type=datastore.RollbackReques assert args[0] == datastore.RollbackRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastore.RollbackResponse) @@ -1057,19 +1088,19 @@ def test_rollback_from_dict(): @pytest.mark.asyncio -async def test_rollback_async(transport: str = "grpc_asyncio"): +async def test_rollback_async( + transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore.RollbackRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.rollback), "__call__" - ) as call: + with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.RollbackResponse() @@ -1081,17 +1112,22 @@ async def test_rollback_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.RollbackRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.RollbackResponse) +@pytest.mark.asyncio +async def test_rollback_async_from_dict(): + await test_rollback_async(request_type=dict) + + def test_rollback_flattened(): client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.rollback), "__call__") as call: + with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RollbackResponse() @@ -1129,9 +1165,7 @@ async def test_rollback_flattened_async(): client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.rollback), "__call__" - ) as call: + with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RollbackResponse() @@ -1180,7 +1214,7 @@ def test_allocate_ids( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.allocate_ids), "__call__") as call: + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.AllocateIdsResponse() @@ -1193,6 +1227,7 @@ def test_allocate_ids( assert args[0] == datastore.AllocateIdsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datastore.AllocateIdsResponse) @@ -1201,19 +1236,19 @@ def test_allocate_ids_from_dict(): @pytest.mark.asyncio -async def test_allocate_ids_async(transport: str = "grpc_asyncio"): +async def test_allocate_ids_async( + transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datastore.AllocateIdsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.allocate_ids), "__call__" - ) as call: + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.AllocateIdsResponse() @@ -1225,17 +1260,22 @@ async def test_allocate_ids_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.AllocateIdsRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.AllocateIdsResponse) +@pytest.mark.asyncio +async def test_allocate_ids_async_from_dict(): + await test_allocate_ids_async(request_type=dict) + + def test_allocate_ids_flattened(): client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.allocate_ids), "__call__") as call: + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.AllocateIdsResponse() @@ -1284,9 +1324,7 @@ async def test_allocate_ids_flattened_async(): client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.allocate_ids), "__call__" - ) as call: + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.AllocateIdsResponse() @@ -1344,7 +1382,7 @@ def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.reserve_ids), "__call__") as call: + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.ReserveIdsResponse() @@ -1357,6 +1395,7 @@ def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsR assert args[0] == datastore.ReserveIdsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datastore.ReserveIdsResponse) @@ -1365,19 +1404,19 @@ def test_reserve_ids_from_dict(): @pytest.mark.asyncio -async def test_reserve_ids_async(transport: str = "grpc_asyncio"): +async def test_reserve_ids_async( + transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest +): client = DatastoreAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datastore.ReserveIdsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.reserve_ids), "__call__" - ) as call: + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.ReserveIdsResponse() @@ -1389,17 +1428,22 @@ async def test_reserve_ids_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datastore.ReserveIdsRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.ReserveIdsResponse) +@pytest.mark.asyncio +async def test_reserve_ids_async_from_dict(): + await test_reserve_ids_async(request_type=dict) + + def test_reserve_ids_flattened(): client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.reserve_ids), "__call__") as call: + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.ReserveIdsResponse() @@ -1448,9 +1492,7 @@ async def test_reserve_ids_flattened_async(): client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.reserve_ids), "__call__" - ) as call: + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.ReserveIdsResponse() @@ -1534,7 +1576,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = DatastoreClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -1567,7 +1609,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.DatastoreGrpcTransport,) + assert isinstance(client.transport, transports.DatastoreGrpcTransport,) def test_datastore_base_transport_error(): @@ -1676,7 +1718,7 @@ def test_datastore_host_no_port(): api_endpoint="datastore.googleapis.com" ), ) - assert client._transport._host == "datastore.googleapis.com:443" + assert client.transport._host == "datastore.googleapis.com:443" def test_datastore_host_with_port(): @@ -1686,7 +1728,7 @@ def test_datastore_host_with_port(): api_endpoint="datastore.googleapis.com:8000" ), ) - assert client._transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == "datastore.googleapis.com:8000" def test_datastore_grpc_transport_channel(): @@ -1698,6 +1740,7 @@ def test_datastore_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_datastore_grpc_asyncio_transport_channel(): @@ -1709,6 +1752,7 @@ def test_datastore_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -1754,6 +1798,7 @@ def test_datastore_transport_channel_mtls_with_client_cert_source(transport_clas quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -1796,6 +1841,107 @@ def test_datastore_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_common_billing_account_path(): + billing_account = "squid" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DatastoreClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DatastoreClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + + expected = "folders/{folder}".format(folder=folder,) + actual = DatastoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DatastoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + + expected = "organizations/{organization}".format(organization=organization,) + actual = DatastoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DatastoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatastoreClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + + expected = "projects/{project}".format(project=project,) + actual = DatastoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DatastoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = DatastoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DatastoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() From 4f3adba4f84b0ef214b3f314b93fca0793db4d73 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 30 Oct 2020 15:57:42 -0700 Subject: [PATCH 301/611] chore: release 2.0.0-dev1 (#110) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 11 +++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 26fa580d0561..8c9f588abea1 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,17 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.0.0-dev1](https://www.github.com/googleapis/python-datastore/compare/v1.15.3...v2.0.0-dev1) (2020-10-30) + + +### ⚠ BREAKING CHANGES + +* Leverage new generator, proto-plus, for google-cloud-datastore (#104) + +### Features + +* Leverage new generator, proto-plus, for google-cloud-datastore ([#104](https://www.github.com/googleapis/python-datastore/issues/104)) ([1723a26](https://www.github.com/googleapis/python-datastore/commit/1723a268a6f647d1c798deb076c038f7af9b16c9)) + ### [1.15.3](https://www.github.com/googleapis/python-datastore/compare/v1.15.2...v1.15.3) (2020-10-06) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 871c958735e7..6eb9f1519ec7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.15.3" +__version__ = "2.0.0-dev1" From af9d6768b0a73ff73e61fb66139856174fe05373 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 3 Nov 2020 16:00:08 -0800 Subject: [PATCH 302/611] chore: start tracking obsolete files (#111) --- .../google-cloud-datastore/synth.metadata | 113 +++++++++++++++++- 1 file changed, 108 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index b9711974f3f4..a8bf8ba9639d 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -3,23 +3,23 @@ { "git": { "name": ".", - "remote": "git@github.com:crwilcox/python-datastore.git", - "sha": "4a8ba01b9802d1284915f18fbd974aca8fef4b48" + "remote": "https://github.com/googleapis/python-datastore.git", + "sha": "dd6c0ee43fe6763528374a3494de0124de0ee652" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "86e167e6be2c4300066fd8ff5db139c6b846d049", - "internalRef": "339897204" + "sha": "7f31f40209008ad24058579e7112e45fc9d5715e", + "internalRef": "339939234" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6542bd723403513626f61642fc02ddca528409aa" + "sha": "b19b401571e77192f8dd38eab5fb2300a0de9324" } } ], @@ -42,5 +42,108 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "google/cloud/datastore_admin_v1/__init__.py", + "google/cloud/datastore_admin_v1/proto/datastore_admin.proto", + "google/cloud/datastore_admin_v1/proto/index.proto", + "google/cloud/datastore_admin_v1/py.typed", + "google/cloud/datastore_admin_v1/services/__init__.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py", + 
"google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/client.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py", + "google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py", + "google/cloud/datastore_admin_v1/types/__init__.py", + "google/cloud/datastore_admin_v1/types/datastore_admin.py", + "google/cloud/datastore_admin_v1/types/index.py", + "google/cloud/datastore_v1/__init__.py", + "google/cloud/datastore_v1/proto/datastore.proto", + "google/cloud/datastore_v1/proto/entity.proto", + "google/cloud/datastore_v1/proto/query.proto", + "google/cloud/datastore_v1/py.typed", + "google/cloud/datastore_v1/services/__init__.py", + "google/cloud/datastore_v1/services/datastore/__init__.py", + "google/cloud/datastore_v1/services/datastore/async_client.py", + "google/cloud/datastore_v1/services/datastore/client.py", + "google/cloud/datastore_v1/services/datastore/transports/__init__.py", + "google/cloud/datastore_v1/services/datastore/transports/base.py", + "google/cloud/datastore_v1/services/datastore/transports/grpc.py", + "google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py", + "google/cloud/datastore_v1/types/__init__.py", + "google/cloud/datastore_v1/types/datastore.py", + "google/cloud/datastore_v1/types/entity.py", + "google/cloud/datastore_v1/types/query.py", + "noxfile.py", + "renovate.json", + "scripts/decrypt-secrets.sh", + "scripts/fixup_datastore_admin_v1_keywords.py", + "scripts/fixup_datastore_v1_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/datastore_admin_v1/__init__.py", + "tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py", + "tests/unit/gapic/datastore_v1/__init__.py", + "tests/unit/gapic/datastore_v1/test_datastore.py" ] } \ No newline at end of file From e5236f7d6c780f9d3762c977bd4ea60e5fb31748 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 6 Nov 2020 14:15:03 -0800 Subject: [PATCH 303/611] docs: adds UPGRADING.md, not to readme, to help inform users about migration to v2 (#113) * docs: adds UPGRADING.md, not to readme, to help inform users about migration to v2 * Update UPGRADING.md Co-authored-by: BenWhitehead * docs: clarify enums statement * docs: add migration section to docs index Co-authored-by: BenWhitehead release-as: 2.0.0 --- packages/google-cloud-datastore/README.rst | 1 + packages/google-cloud-datastore/UPGRADING.md | 134 ++++++++++++++++++ .../google-cloud-datastore/docs/UPGRADING.md | 134 ++++++++++++++++++ .../google-cloud-datastore/docs/index.rst | 9 ++ 4 files changed, 278 insertions(+) create mode 100644 packages/google-cloud-datastore/UPGRADING.md create mode 100644 packages/google-cloud-datastore/docs/UPGRADING.md diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 0b6470e6725e..bef8a2fcdb74 100644 --- a/packages/google-cloud-datastore/README.rst +++ 
b/packages/google-cloud-datastore/README.rst
@@ -55,6 +55,7 @@ Supported Python Versions
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Python >= 3.6
+The last version of this library compatible with Python 2.7 is google-cloud-datastore==1.15.3
 
 Mac/Linux
 ^^^^^^^^^
diff --git a/packages/google-cloud-datastore/UPGRADING.md b/packages/google-cloud-datastore/UPGRADING.md
new file mode 100644
index 000000000000..517c39adb8f9
--- /dev/null
+++ b/packages/google-cloud-datastore/UPGRADING.md
@@ -0,0 +1,134 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-datastore` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library may require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-datastore/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+If you previously were using modules or functions under the namespace
+`google.cloud.datastore_v1.gapic` there is a high likelihood you have incompatible code.
+To assist with this, we have included some helpful scripts to make some of the code
+modifications required to use 2.0.0.
+
+* Install the library
+
+```sh
+python3 -m pip install google-cloud-datastore
+```
+
+* The scripts `fixup_datastore_v1_keywords.py` and `fixup_datastore_admin_v1_keywords.py`
+are shipped with the library. They expect an input directory (with the code to convert)
+and an empty destination directory.
+
+```sh
+$ fixup_datastore_v1_keywords.py --input-directory .samples/ --output-directory samples/
+$ fixup_datastore_admin_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+### More Details
+
+In `google-cloud-datastore<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+    def a_method(
+        self,
+        param1,
+        param2,
+        param3,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword-only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
+
+
+**After:**
+```py
+    def a_method(
+        self,
+        request: RequestType = None,
+        *,
+        param1,
+        param2,
+        param3,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.a_method(
+    request={
+        "param1": param1,
+        "param2": param2,
+        "param3": param3
+    }
+)
+```
+
+```py
+response = client.a_method(
+    param1=param1,
+    param2=param2,
+    param3=param3
+)
+```
+
+This call is invalid because it mixes `request` with a keyword argument `param2`. Executing this code
+will result in an error.
+
+```py
+response = client.a_method(
+    request={
+        "param1": param1,
+        "param2": param2
+    },
+    param2=param2
+)
+```
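+
+For a concrete sketch with this library, the request-based style looks like this. The project id,
+kind, and id below are hypothetical placeholders; the field names mirror `datastore.LookupRequest`:
+
+```py
+from google.cloud import datastore_v1
+
+# Assumes application default credentials are available.
+client = datastore_v1.DatastoreClient()
+
+# Proto-plus messages accept plain dicts for nested fields.
+key = datastore_v1.types.Key(
+    partition_id={"project_id": "my-project"},
+    path=[{"kind": "Task", "id": 1234}],
+)
+
+response = client.lookup(request={"project_id": "my-project", "keys": [key]})
+```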
+
+
+
+## Enums and Types
+
+
+> **WARNING**: Breaking change
+
+The `enums` submodule has been removed.
+
+**Before:**
+```py
+from google.cloud import datastore_v1
+
+direction = datastore_v1.enums.CommitRequest.Mode.TRANSACTIONAL
+```
+
+
+**After:**
+```py
+from google.cloud import datastore_v1
+
+direction = datastore_v1.types.CommitRequest.Mode.TRANSACTIONAL
+```
diff --git a/packages/google-cloud-datastore/docs/UPGRADING.md b/packages/google-cloud-datastore/docs/UPGRADING.md
new file mode 100644
index 000000000000..489f9f4325dd
--- /dev/null
+++ b/packages/google-cloud-datastore/docs/UPGRADING.md
@@ -0,0 +1,134 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-datastore` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library may require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-datastore/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+If you previously were using modules or functions under the namespace
+`google.cloud.datastore_v1.gapic` there is a high likelihood you have incompatible code.
+To assist with this, we have included some helpful scripts to make some of the code
+modifications required to use 2.0.0.
+
+* Install the library
+
+```sh
+python3 -m pip install google-cloud-datastore
+```
+
+* The scripts `fixup_datastore_v1_keywords.py` and `fixup_datastore_admin_v1_keywords.py`
+are shipped with the library. They expect an input directory (with the code to convert)
+and an empty destination directory.
+
+```sh
+$ fixup_datastore_v1_keywords.py --input-directory .samples/ --output-directory samples/
+$ fixup_datastore_admin_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+### More Details
+
+In `google-cloud-datastore<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+    def a_method(
+        self,
+        param1,
+        param2,
+        param3,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword-only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
+
+
+**After:**
+```py
+    def a_method(
+        self,
+        request: RequestType = None,
+        *,
+        param1,
+        param2,
+        param3,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.a_method(
+    request={
+        "param1": param1,
+        "param2": param2,
+        "param3": param3
+    }
+)
+```
+
+```py
+response = client.a_method(
+    param1=param1,
+    param2=param2,
+    param3=param3
+)
+```
+
+This call is invalid because it mixes `request` with the keyword argument `param2`. Executing this code
+will result in an error.
+
+```py
+response = client.a_method(
+    request={
+        "param1": param1,
+        "param2": param2
+    },
+    param2=param2
+)
+```
+
+
+
+## Enums and Types
+
+
+> **WARNING**: Breaking change
+
+The `enums` submodule has been removed.
+
+**Before:**
+```py
+from google.cloud import datastore_v1
+
+mode = datastore_v1.enums.CommitRequest.Mode.TRANSACTIONAL
+```
+
+
+**After:**
+```py
+from google.cloud import datastore_v1
+
+mode = datastore_v1.types.CommitRequest.Mode.TRANSACTIONAL
+```
diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst
index 60357e2dc3fb..4866c89142ba 100644
--- a/packages/google-cloud-datastore/docs/index.rst
+++ b/packages/google-cloud-datastore/docs/index.rst
@@ -23,6 +23,15 @@ API Reference
   helpers
   admin_client
 
+Migration Guide
+---------------
+
+See the guide below for instructions on migrating to the 2.x release of this library.
+
+.. toctree::
+    :maxdepth: 2
+
+    UPGRADING
 
 Changelog
 ---------
From 1215185c92eacf95a51affbaa895b6a35a3be8da Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Tue, 10 Nov 2020 10:33:30 -0800
Subject: [PATCH 304/611] chore: release 2.0.0 (#114)

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
Co-authored-by: Christopher Wilcox
---
 packages/google-cloud-datastore/CHANGELOG.md | 17 +++++++++++++++++
 .../google/cloud/datastore/version.py        |  2 +-
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md
index 8c9f588abea1..d8eb7e089f33 100644
--- a/packages/google-cloud-datastore/CHANGELOG.md
+++ b/packages/google-cloud-datastore/CHANGELOG.md
@@ -4,6 +4,23 @@
 
 [1]: https://pypi.org/project/google-cloud-datastore/#history
 
+## [2.0.0](https://www.github.com/googleapis/python-datastore/compare/v1.15.3...v2.0.0) (2020-11-06)
+
+
+### ⚠ BREAKING CHANGES
+
+* remove support for Python 2.7
+* Leverage new generator, proto-plus, for google-cloud-datastore (#104)
+
+### Features
+
+* Leverage new generator, proto-plus, for google-cloud-datastore ([#104](https://www.github.com/googleapis/python-datastore/issues/104)) ([1723a26](https://www.github.com/googleapis/python-datastore/commit/1723a268a6f647d1c798deb076c038f7af9b16c9))
+
+
+### Documentation
+
+* adds UPGRADING.md, note to readme, to help inform users about migration to v2 ([#113](https://www.github.com/googleapis/python-datastore/issues/113)) ([0d496c6](https://www.github.com/googleapis/python-datastore/commit/0d496c639170d2d5e30a3b69c790b3abfb2ad170))
+
 ## [2.0.0-dev1](https://www.github.com/googleapis/python-datastore/compare/v1.15.3...v2.0.0-dev1) (2020-10-30)
 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py
index 6eb9f1519ec7..a12de3d254b4 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/version.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing
permissions and # limitations under the License. -__version__ = "2.0.0-dev1" +__version__ = "2.0.0" From 170262eb500641e8c0c0e25a98672d14de8352b1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 11 Nov 2020 16:23:21 -0500 Subject: [PATCH 305/611] fix: normalize / test deprecation of 'Client.reserve_ids' (#103) Closes #101 Closes #100 Co-authored-by: Christopher Wilcox --- .../google/cloud/datastore/client.py | 10 ++-- .../tests/unit/test_client.py | 48 ++++++++++++++++--- 2 files changed, 47 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index e06b8e60e2ce..24b53b543756 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -60,6 +60,10 @@ DISABLE_GRPC = "GOOGLE_CLOUD_DISABLE_GRPC" """Environment variable acting as flag to disable gRPC.""" +_RESERVE_IDS_DEPRECATED_MESSAGE = """\ +Client.reserve_ids is deprecated. Please use \ +Client.reserve_ids_multi or Client.reserve_ids_sequential""" + _USE_GRPC = _HAVE_GRPC and not os.getenv(DISABLE_GRPC, False) @@ -890,11 +894,7 @@ def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None): Please use either :meth:`reserve_ids_multi` (recommended) or :meth:`reserve_ids_sequential`. """ - message = ( - "Client.reserve_ids is deprecated. Please use " - "Client.reserve_ids_multi or Client.reserve_ids_sequential", - ) - warnings.warn(message, DeprecationWarning) + warnings.warn(_RESERVE_IDS_DEPRECATED_MESSAGE, DeprecationWarning) return self.reserve_ids_sequential( complete_key, num_ids, retry=retry, timeout=timeout ) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 59588f10ade7..55a45c7f363a 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -1113,6 +1113,8 @@ def test_reserve_ids_sequential_w_non_numeric_key_name(self): client.reserve_ids_sequential(complete_key, num_ids) def test_reserve_ids_w_completed_key(self): + import warnings + num_ids = 2 creds = _make_credentials() client = self._make_one(credentials=creds, _use_grpc=False) @@ -1122,7 +1124,8 @@ def test_reserve_ids_w_completed_key(self): client._datastore_api_internal = ds_api self.assertTrue(not complete_key.is_partial) - client.reserve_ids(complete_key, num_ids) + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) reserved_keys = ( _Key(_Key.kind, id) @@ -1133,7 +1136,12 @@ def test_reserve_ids_w_completed_key(self): request={"project_id": self.PROJECT, "keys": expected_keys} ) + self.assertEqual(len(warned), 1) + self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) + def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): + import warnings + num_ids = 2 retry = mock.Mock() timeout = 100000 @@ -1146,7 +1154,8 @@ def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) client._datastore_api_internal = ds_api - client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) reserved_keys = ( _Key(_Key.kind, id) @@ -1159,7 +1168,12 @@ def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): 
timeout=timeout, ) + self.assertEqual(len(warned), 1) + self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) + def test_reserve_ids_w_completed_key_w_ancestor(self): + import warnings + num_ids = 2 creds = _make_credentials() client = self._make_one(credentials=creds, _use_grpc=False) @@ -1169,7 +1183,8 @@ def test_reserve_ids_w_completed_key_w_ancestor(self): client._datastore_api_internal = ds_api self.assertTrue(not complete_key.is_partial) - client.reserve_ids(complete_key, num_ids) + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) reserved_keys = ( _Key("PARENT", "SINGLETON", _Key.kind, id) @@ -1180,29 +1195,50 @@ def test_reserve_ids_w_completed_key_w_ancestor(self): request={"project_id": self.PROJECT, "keys": expected_keys} ) + self.assertEqual(len(warned), 1) + self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) + def test_reserve_ids_w_partial_key(self): + import warnings + num_ids = 2 incomplete_key = _Key(_Key.kind, None) creds = _make_credentials() client = self._make_one(credentials=creds) with self.assertRaises(ValueError): - client.reserve_ids(incomplete_key, num_ids) + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(incomplete_key, num_ids) + + self.assertEqual(len(warned), 1) + self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) def test_reserve_ids_w_wrong_num_ids(self): + import warnings + num_ids = "2" complete_key = _Key() creds = _make_credentials() client = self._make_one(credentials=creds) with self.assertRaises(ValueError): - client.reserve_ids(complete_key, num_ids) + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) + + self.assertEqual(len(warned), 1) + self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) def test_reserve_ids_w_non_numeric_key_name(self): + import warnings + num_ids = 2 complete_key = _Key(_Key.kind, "batman") creds = _make_credentials() client = self._make_one(credentials=creds) with self.assertRaises(ValueError): - client.reserve_ids(complete_key, num_ids) + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) + + self.assertEqual(len(warned), 1) + self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) def test_reserve_ids_multi(self): creds = _make_credentials() From 7d778fdda6e45cdc1bd54bad5e6409c2f34eb1fc Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Fri, 13 Nov 2020 15:46:20 -0500 Subject: [PATCH 306/611] fix: fix id_or_name property of key class (#115) * fix: fix id_or_name property of key class * fix: unit test added --- .../google-cloud-datastore/google/cloud/datastore/key.py | 4 +++- packages/google-cloud-datastore/tests/unit/test_key.py | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index d03359bcabb7..c9beaeb2dad2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -441,7 +441,9 @@ def id_or_name(self): :returns: The last element of the key's path if it is either an ``id`` or a ``name``. 
""" - return self.id or self.name + if self.id is None: + return self.name + return self.id @property def project(self): diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index 73565eadb412..9d130fb49ddb 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -488,6 +488,11 @@ def test_id_or_name_w_name_only(self): key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT) self.assertEqual(key.id_or_name, _NAME) + def test_id_or_name_w_id_zero(self): + _ID = 0 + key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, _ID) + def test_parent_default(self): key = self._make_one("KIND", project=self._DEFAULT_PROJECT) self.assertIsNone(key.parent) From 209a8026081d666a0b5e01dc391ef9cc06b20dfe Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 16 Nov 2020 10:22:56 -0800 Subject: [PATCH 307/611] chore: release 2.0.1 (#117) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 8 ++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index d8eb7e089f33..1a3fd6d6536a 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.0...v2.0.1) (2020-11-13) + + +### Bug Fixes + +* fix id_or_name property of key class ([#115](https://www.github.com/googleapis/python-datastore/issues/115)) ([6f28b84](https://www.github.com/googleapis/python-datastore/commit/6f28b84fcc8c593bf7fbd6335999f3cc6da56cd4)) +* normalize / test deprecation of 'Client.reserve_ids' ([#103](https://www.github.com/googleapis/python-datastore/issues/103)) ([5851522](https://www.github.com/googleapis/python-datastore/commit/5851522900fc07c9cc13e1af2cf7b54d709c9ddb)), closes [#101](https://www.github.com/googleapis/python-datastore/issues/101) [#100](https://www.github.com/googleapis/python-datastore/issues/100) + ## [2.0.0](https://www.github.com/googleapis/python-datastore/compare/v1.15.3...v2.0.0) (2020-11-06) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index a12de3d254b4..956a957b6500 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.0.0" +__version__ = "2.0.1" From 54e0e16b2accf0a0fd9bc6c7ceca7f5629706197 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 25 Nov 2020 12:13:46 -0500 Subject: [PATCH 308/611] feat: remove six dependency (#120) --- .../google/cloud/datastore/_gapic.py | 4 ++-- .../google/cloud/datastore/helpers.py | 11 +++++------ .../google/cloud/datastore/key.py | 11 +++++------ .../tests/system/test_system.py | 7 +++---- .../tests/system/utils/clear_datastore.py | 4 +--- .../tests/system/utils/populate_datastore.py | 4 +--- .../google-cloud-datastore/tests/unit/test__http.py | 6 +++--- 7 files changed, 20 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py index 3200ea3c3ee1..e901fe6c884a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_gapic.py @@ -15,7 +15,7 @@ """Helpers for making API requests via gapic / gRPC.""" from grpc import insecure_channel -import six +from urllib.parse import urlparse from google.cloud._helpers import make_secure_channel from google.cloud._http import DEFAULT_USER_AGENT @@ -32,7 +32,7 @@ def make_datastore_api(client): :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` :returns: A datastore API instance with the proper credentials. """ - parse_result = six.moves.urllib_parse.urlparse(client._base_url) + parse_result = urlparse(client._base_url) host = parse_result.netloc if parse_result.scheme == "https": channel = make_secure_channel(client._credentials, DEFAULT_USER_AGENT, host) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index f8b32f38e8e6..eada5f4f1db8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -22,7 +22,6 @@ from google.protobuf import struct_pb2 from google.type import latlng_pb2 -import six from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -105,7 +104,7 @@ def _property_tuples(entity_pb): :returns: An iterator that yields tuples of a name and ``Value`` corresponding to properties on the entity. 
""" - return six.iteritems(entity_pb.properties) + return iter(entity_pb.properties.items()) def entity_from_protobuf(pb): @@ -206,7 +205,7 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): if is_list: if not isinstance(meaning, list): meaning = itertools.repeat(meaning) - val_iter = six.moves.zip(value_pb.array_value.values, meaning) + val_iter = zip(value_pb.array_value.values, meaning) for sub_value_pb, sub_meaning in val_iter: if sub_meaning is not None: sub_value_pb.meaning = sub_meaning @@ -359,11 +358,11 @@ def _pb_attr_value(val): name, value = "boolean", val elif isinstance(val, float): name, value = "double", val - elif isinstance(val, six.integer_types): + elif isinstance(val, int): name, value = "integer", val - elif isinstance(val, six.text_type): + elif isinstance(val, str): name, value = "string", val - elif isinstance(val, six.binary_type): + elif isinstance(val, bytes): name, value = "blob", val elif isinstance(val, Entity): name, value = "entity", val diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index c9beaeb2dad2..98502f9c6c61 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -16,7 +16,6 @@ import base64 import copy -import six from google.cloud.datastore_v1.types import entity as _entity_pb2 @@ -185,14 +184,14 @@ def _parse_path(path_args): result = [] for kind, id_or_name in zip(kind_list, id_or_name_list): curr_key_part = {} - if isinstance(kind, six.string_types): + if isinstance(kind, str): curr_key_part["kind"] = kind else: raise ValueError(kind, "Kind was not a string.") - if isinstance(id_or_name, six.string_types): + if isinstance(id_or_name, str): curr_key_part["name"] = id_or_name - elif isinstance(id_or_name, six.integer_types): + elif isinstance(id_or_name, int): curr_key_part["id"] = id_or_name elif id_or_name is not partial_ending: raise ValueError(id_or_name, "ID/name was not a string or integer.") @@ -264,9 +263,9 @@ def completed_key(self, id_or_name): if not self.is_partial: raise ValueError("Only a partial key can be completed.") - if isinstance(id_or_name, six.string_types): + if isinstance(id_or_name, str): id_or_name_key = "name" - elif isinstance(id_or_name, six.integer_types): + elif isinstance(id_or_name, int): id_or_name_key = "id" else: raise ValueError(id_or_name, "ID/name was not a string or integer.") diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index c807781bc65c..89b0a395962b 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -18,7 +18,6 @@ import warnings import requests -import six from google.cloud._helpers import UTC from google.cloud import datastore @@ -294,7 +293,7 @@ def test_limit_queries(self): # Fetch characters. iterator = query.fetch(limit=limit) - page = six.next(iterator.pages) + page = next(iterator.pages) character_entities = list(page) cursor = iterator.next_page_token self.assertEqual(len(character_entities), limit) @@ -442,7 +441,7 @@ def test_query_paginate_with_offset(self): iterator = page_query.fetch(limit=limit, offset=offset) # Fetch characters. 
- page = six.next(iterator.pages) + page = next(iterator.pages) entities = list(page) cursor = iterator.next_page_token self.assertEqual(len(entities), limit) @@ -466,7 +465,7 @@ def test_query_paginate_with_start_cursor(self): iterator = page_query.fetch(limit=limit, offset=offset) # Fetch characters. - page = six.next(iterator.pages) + page = next(iterator.pages) entities = list(page) cursor = iterator.next_page_token self.assertEqual(len(entities), limit) diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index 3438ff895e26..fa976f60907f 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -19,8 +19,6 @@ import os import sys -import six - from google.cloud import datastore @@ -96,7 +94,7 @@ def main(): print_func("This command will remove all entities for " "the following kinds:") print_func("\n".join("- " + val for val in kinds)) - response = six.moves.input("Is this OK [y/n]? ") + response = input("Is this OK [y/n]? ") if response.lower() == "y": diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 223741e8897c..06b2895ac0f2 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -23,8 +23,6 @@ import time import uuid -import six - from google.cloud import datastore @@ -121,7 +119,7 @@ def add_characters(client=None): # Get a client that uses the test dataset. client = datastore.Client() with client.transaction() as xact: - for key_path, character in six.moves.zip(KEY_PATHS, CHARACTERS): + for key_path, character in zip(KEY_PATHS, CHARACTERS): if key_path[-1] != character["name"]: raise ValueError(("Character and key don't agree", key_path, character)) entity = datastore.Entity(key=client.key(*key_path)) diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index 700429ff0179..6048d40be401 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -15,7 +15,7 @@ import unittest import mock -from six.moves import http_client +from http import client import requests @@ -72,7 +72,7 @@ def test_failure(self): error.code = code_pb2.FAILED_PRECONDITION http = _make_requests_session( - [_make_response(http_client.BAD_REQUEST, content=error.SerializeToString())] + [_make_response(client.BAD_REQUEST, content=error.SerializeToString())] ) with self.assertRaises(BadRequest) as exc: @@ -808,7 +808,7 @@ def test_allocate_ids_non_empty(self): self.assertEqual(key_before, key_after) -def _make_response(status=http_client.OK, content=b"", headers={}): +def _make_response(status=client.OK, content=b"", headers={}): response = requests.Response() response.status_code = status response._content = content From 88b5793e396cb8794189ecfff289615b75ea0a89 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 2 Dec 2020 11:06:30 -0800 Subject: [PATCH 309/611] chore: release 2.0.1 (#128) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ 1 file changed, 7 
insertions(+) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 1a3fd6d6536a..c7b7ad57f5f1 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.1...v2.0.1) (2020-12-02) + + +### Features + +* remove six dependency ([#120](https://www.github.com/googleapis/python-datastore/issues/120)) ([b1715e5](https://www.github.com/googleapis/python-datastore/commit/b1715e500f870fd5292bb84232b0039c2ac6be85)) + ### [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.0...v2.0.1) (2020-11-13) From 43f7a65901bfd1ad82ba766647b05fcd90c63e94 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 4 Dec 2020 15:57:47 -0800 Subject: [PATCH 310/611] feat: support autoconversion of Entity to Key for purposes of delete & delete_multi (#123) * feat: support autoconversion of Entity to Key for purposes of delete, delete_multi * Update google/cloud/datastore/client.py Co-authored-by: Tres Seaver * test: update typing, use entity delete path * fix: remove warning * test: add unit test for delete multi with an entity * fix: lint/black Co-authored-by: Tres Seaver --- .../google/cloud/datastore/client.py | 8 ++++++-- .../tests/system/test_system.py | 6 ++---- .../tests/unit/test_client.py | 18 ++++++++++++++++++ 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 24b53b543756..28d968ce0a02 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -622,7 +622,8 @@ def delete(self, key, retry=None, timeout=None): The backend API does not make a distinction between a single key or multiple keys in a commit request. - :type key: :class:`google.cloud.datastore.key.Key` + :type key: :class:`google.cloud.datastore.key.Key`, :class:`google.cloud.datastore.entity.Entity` + :param key: The key to be deleted from the datastore. :type retry: :class:`google.api_core.retry.Retry` @@ -643,7 +644,7 @@ def delete(self, key, retry=None, timeout=None): def delete_multi(self, keys, retry=None, timeout=None): """Delete keys from the Cloud Datastore. - :type keys: list of :class:`google.cloud.datastore.key.Key` + :type keys: list of :class:`google.cloud.datastore.key.Key`, :class:`google.cloud.datastore.entity.Entity` :param keys: The keys to be deleted from the Datastore. :type retry: :class:`google.api_core.retry.Retry` @@ -671,6 +672,9 @@ def delete_multi(self, keys, retry=None, timeout=None): current.begin() for key in keys: + if isinstance(key, Entity): + # If the key is in fact an Entity, the key can be extracted. 
+ key = key.key current.delete(key) if not in_batch: diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py index 89b0a395962b..a91b99ae89f9 100644 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ b/packages/google-cloud-datastore/tests/system/test_system.py @@ -72,9 +72,8 @@ def setUpModule(): def tearDownModule(): - keys = [entity.key for entity in Config.TO_DELETE] with Config.CLIENT.transaction(): - Config.CLIENT.delete_multi(keys) + Config.CLIENT.delete_multi(Config.TO_DELETE) class TestDatastore(unittest.TestCase): @@ -83,8 +82,7 @@ def setUp(self): def tearDown(self): with Config.CLIENT.transaction(): - keys = [entity.key for entity in self.case_entities_to_delete] - Config.CLIENT.delete_multi(keys) + Config.CLIENT.delete_multi(self.case_entities_to_delete) class TestDatastoreAllocateIDs(TestDatastore): diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 55a45c7f363a..3c75a5fba810 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -961,6 +961,24 @@ def test_delete_multi_w_existing_transaction(self): self.assertEqual(mutated_key, key._key) client._datastore_api_internal.commit.assert_not_called() + def test_delete_multi_w_existing_transaction_entity(self): + from google.cloud.datastore.entity import Entity + + creds = _make_credentials() + client = self._make_one(credentials=creds) + client._datastore_api_internal = _make_datastore_api() + + key = _Key() + entity = Entity(key=key) + + with _NoCommitTransaction(client) as CURR_XACT: + result = client.delete_multi([entity]) + + self.assertIsNone(result) + mutated_key = _mutated_pb(self, CURR_XACT.mutations, "delete") + self.assertEqual(mutated_key, key._key) + client._datastore_api_internal.commit.assert_not_called() + def test_allocate_ids_w_partial_key(self): num_ids = 2 From 1b7bb816475505b0aa2c6d43d0a767bb4782b3d2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 4 Dec 2020 17:07:08 -0700 Subject: [PATCH 311/611] chore: release 2.1.0 (#130) * chore: release 2.1.0 * Update CHANGELOG.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Christopher Wilcox --- packages/google-cloud-datastore/CHANGELOG.md | 6 +++++- .../google/cloud/datastore/version.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index c7b7ad57f5f1..28948fd6eb45 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,11 +4,15 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history -### [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.1...v2.0.1) (2020-12-02) +## [2.1.0](https://www.github.com/googleapis/python-datastore/compare/v2.0.1...v2.1.0) (2020-12-04) ### Features +* support autoconversion of Entity to Key for purposes of delete & delete_multi ([#123](https://www.github.com/googleapis/python-datastore/issues/123)) ([bf1dde6](https://www.github.com/googleapis/python-datastore/commit/bf1dde60b2f42e939c7dfa4a5228c3f41d565ece)) + +### Fix + * remove six dependency ([#120](https://www.github.com/googleapis/python-datastore/issues/120)) 
([b1715e5](https://www.github.com/googleapis/python-datastore/commit/b1715e500f870fd5292bb84232b0039c2ac6be85))
 
 ### [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.0...v2.0.1) (2020-11-13)
 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py
index 956a957b6500..8b5d3328c28b 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/version.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "2.0.1"
+__version__ = "2.1.0"

From 31553e615a5b21f7c138414d843af6a7cc8f7f5f Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 9 Dec 2020 18:09:48 -0500
Subject: [PATCH 312/611] fix: make HTTPDatastoreAPI compatible w/ microgen Gapic API (#136)

* tests: refactor to helpers for repeated tests

* fix: update 'HTTPDatastoreAPI.allocate_ids' to follow request-only pattern

Toward #124

* fix: add missing 'HTTPDatastoreAPI.reserve_ids' method

Closes #134.

* fix: update 'HTTPDatastoreAPI.rollback' to follow request-only pattern

Toward #124

* fix: update 'HTTPDatastoreAPI.commit' to follow request-only pattern

Toward #124

* fix: update 'HTTPDatastoreAPI.begin_transaction' to follow request-only pattern

Toward #124

* fix: update 'HTTPDatastoreAPI.run_query' to follow request-only pattern

Toward #124

* fix: update 'HTTPDatastoreAPI.lookup' to follow request-only pattern

Toward #124

* fix: add 'retry' / 'timeout' args to HTTPDatastoreAPI methods

Closes #124.

* chore: lint

* tests: run systests also w/ GRPC disabled.

Closes #133.
---
 .../google/cloud/datastore/_http.py          | 264 +++--
 packages/google-cloud-datastore/noxfile.py   |  13 +-
 .../tests/unit/test__http.py                 | 907 +++++++++---------
 3 files changed, 664 insertions(+), 520 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index 8f2c9c581686..9e13567b509b 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -31,7 +31,30 @@
 """A template for the URL of a particular API call."""
 
 
-def _request(http, project, method, data, base_url, client_info):
+def _make_retry_timeout_kwargs(retry, timeout):
+    """Helper for methods taking optional retry / timeout args."""
+    kwargs = {}
+
+    if retry is not None:
+        kwargs["retry"] = retry
+
+    if timeout is not None:
+        kwargs["timeout"] = timeout
+
+    return kwargs
+
+
+def _make_request_pb(request, request_pb_type):
+    """Helper for converting dicts to request messages."""
+    if not isinstance(request, request_pb_type):
+        request = request_pb_type(**request)
+
+    return request
+
+
+def _request(
+    http, project, method, data, base_url, client_info, retry=None, timeout=None,
+):
     """Make a request over the Http transport to the Cloud Datastore API.
 
     :type http: :class:`requests.Session`
@@ -54,6 +77,12 @@ def _request(http, project, method, data, base_url, client_info):
     :type client_info: :class:`google.api_core.client_info.ClientInfo`
     :param client_info: used to generate user agent.
 
+    :type retry: :class:`google.api_core.retry.Retry`
+    :param retry: (Optional) retry policy for the request
+
+    :type timeout: float or tuple(float, float)
+    :param timeout: (Optional) timeout for the request
+
     :rtype: str
     :returns: The string response content from the API call.
:raises: :class:`google.cloud.exceptions.GoogleCloudError` if the @@ -67,7 +96,17 @@ def _request(http, project, method, data, base_url, client_info): } api_url = build_api_url(project, method, base_url) - response = http.request(url=api_url, method="POST", headers=headers, data=data) + requester = http.request + + if retry is not None: + requester = retry(requester) + + if timeout is not None: + response = requester( + url=api_url, method="POST", headers=headers, data=data, timeout=timeout, + ) + else: + response = requester(url=api_url, method="POST", headers=headers, data=data) if response.status_code != 200: error_status = status_pb2.Status.FromString(response.content) @@ -78,7 +117,17 @@ def _request(http, project, method, data, base_url, client_info): return response.content -def _rpc(http, project, method, base_url, client_info, request_pb, response_pb_cls): +def _rpc( + http, + project, + method, + base_url, + client_info, + request_pb, + response_pb_cls, + retry=None, + timeout=None, +): """Make a protobuf RPC request. :type http: :class:`requests.Session` @@ -105,11 +154,20 @@ def _rpc(http, project, method, base_url, client_info, request_pb, response_pb_c :param response_pb_cls: The class used to unmarshall the response protobuf. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: (Optional) retry policy for the request + + :type timeout: float or tuple(float, float) + :param timeout: (Optional) timeout for the request + :rtype: :class:`google.protobuf.message.Message` :returns: The RPC message parsed from the response. """ req_data = request_pb._pb.SerializeToString() - response = _request(http, project, method, req_data, base_url, client_info) + kwargs = _make_retry_timeout_kwargs(retry, timeout) + response = _request( + http, project, method, req_data, base_url, client_info, **kwargs + ) return response_pb_cls.deserialize(response) @@ -149,27 +207,25 @@ class HTTPDatastoreAPI(object): def __init__(self, client): self.client = client - def lookup(self, project_id, keys, read_options=None): + def lookup(self, request, retry=None, timeout=None): """Perform a ``lookup`` request. - :type project_id: str - :param project_id: The project to connect to. This is - usually your project name in the cloud console. + :type request: :class:`_datastore_pb2.LookupRequest` or dict + :param request: + Parameter bundle for API request. - :type keys: List[.entity_pb2.Key] - :param keys: The keys to retrieve from the datastore. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: (Optional) retry policy for the request - :type read_options: :class:`.datastore_pb2.ReadOptions` - :param read_options: (Optional) The options for this lookup. Contains - either the transaction for the read or - ``STRONG`` or ``EVENTUAL`` read consistency. + :type timeout: float or tuple(float, float) + :param timeout: (Optional) timeout for the request :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. 
""" - request_pb = _datastore_pb2.LookupRequest( - project_id=project_id, read_options=read_options, keys=keys - ) + request_pb = _make_request_pb(request, _datastore_pb2.LookupRequest) + project_id = request_pb.project_id + return _rpc( self.client._http, project_id, @@ -178,44 +234,29 @@ def lookup(self, project_id, keys, read_options=None): self.client._client_info, request_pb, _datastore_pb2.LookupResponse, + retry=retry, + timeout=timeout, ) - def run_query( - self, project_id, partition_id, read_options=None, query=None, gql_query=None - ): + def run_query(self, request, retry=None, timeout=None): """Perform a ``runQuery`` request. - :type project_id: str - :param project_id: The project to connect to. This is - usually your project name in the cloud console. + :type request: :class:`_datastore_pb2.BeginTransactionRequest` or dict + :param request: + Parameter bundle for API request. - :type partition_id: :class:`.entity_pb2.PartitionId` - :param partition_id: Partition ID corresponding to an optional - namespace and project ID. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: (Optional) retry policy for the request - :type read_options: :class:`.datastore_pb2.ReadOptions` - :param read_options: (Optional) The options for this query. Contains - either the transaction for the read or - ``STRONG`` or ``EVENTUAL`` read consistency. - - :type query: :class:`.query_pb2.Query` - :param query: (Optional) The query protobuf to run. At most one of - ``query`` and ``gql_query`` can be specified. - - :type gql_query: :class:`.query_pb2.GqlQuery` - :param gql_query: (Optional) The GQL query to run. At most one of - ``query`` and ``gql_query`` can be specified. + :type timeout: float or tuple(float, float) + :param timeout: (Optional) timeout for the request :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ - request_pb = _datastore_pb2.RunQueryRequest( - project_id=project_id, - partition_id=partition_id, - read_options=read_options, - query=query, - gql_query=gql_query, - ) + request_pb = _make_request_pb(request, _datastore_pb2.RunQueryRequest) + project_id = request_pb.project_id + return _rpc( self.client._http, project_id, @@ -224,22 +265,29 @@ def run_query( self.client._client_info, request_pb, _datastore_pb2.RunQueryResponse, + retry=retry, + timeout=timeout, ) - def begin_transaction(self, project_id, transaction_options=None): + def begin_transaction(self, request, retry=None, timeout=None): """Perform a ``beginTransaction`` request. - :type project_id: str - :param project_id: The project to connect to. This is - usually your project name in the cloud console. + :type request: :class:`_datastore_pb2.BeginTransactionRequest` or dict + :param request: + Parameter bundle for API request. + + :type retry: :class:`google.api_core.retry.Retry` + :param retry: (Optional) retry policy for the request - :type transaction_options: ~.datastore_v1.types.TransactionOptions - :param transaction_options: (Optional) Options for a new transaction. + :type timeout: float or tuple(float, float) + :param timeout: (Optional) timeout for the request :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. 
""" - request_pb = _datastore_pb2.BeginTransactionRequest() + request_pb = _make_request_pb(request, _datastore_pb2.BeginTransactionRequest) + project_id = request_pb.project_id + return _rpc( self.client._http, project_id, @@ -248,37 +296,29 @@ def begin_transaction(self, project_id, transaction_options=None): self.client._client_info, request_pb, _datastore_pb2.BeginTransactionResponse, + retry=retry, + timeout=timeout, ) - def commit(self, project_id, mode, mutations, transaction=None): + def commit(self, request, retry=None, timeout=None): """Perform a ``commit`` request. - :type project_id: str - :param project_id: The project to connect to. This is - usually your project name in the cloud console. + :type request: :class:`_datastore_pb2.CommitRequest` or dict + :param request: + Parameter bundle for API request. - :type mode: :class:`.gapic.datastore.v1.enums.CommitRequest.Mode` - :param mode: The type of commit to perform. Expected to be one of - ``TRANSACTIONAL`` or ``NON_TRANSACTIONAL``. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: (Optional) retry policy for the request - :type mutations: list - :param mutations: List of :class:`.datastore_pb2.Mutation`, the - mutations to perform. - - :type transaction: bytes - :param transaction: (Optional) The transaction ID returned from - :meth:`begin_transaction`. Non-transactional - commits must pass :data:`None`. + :type timeout: float or tuple(float, float) + :param timeout: (Optional) timeout for the request :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ - request_pb = _datastore_pb2.CommitRequest( - project_id=project_id, - mode=mode, - transaction=transaction, - mutations=mutations, - ) + request_pb = _make_request_pb(request, _datastore_pb2.CommitRequest) + project_id = request_pb.project_id + return _rpc( self.client._http, project_id, @@ -287,25 +327,29 @@ def commit(self, project_id, mode, mutations, transaction=None): self.client._client_info, request_pb, _datastore_pb2.CommitResponse, + retry=retry, + timeout=timeout, ) - def rollback(self, project_id, transaction): + def rollback(self, request, retry=None, timeout=None): """Perform a ``rollback`` request. - :type project_id: str - :param project_id: The project to connect to. This is - usually your project name in the cloud console. + :type request: :class:`_datastore_pb2.RollbackRequest` or dict + :param request: + Parameter bundle for API request. - :type transaction: bytes - :param transaction: The transaction ID to rollback. + :type retry: :class:`google.api_core.retry.Retry` + :param retry: (Optional) retry policy for the request + + :type timeout: float or tuple(float, float) + :param timeout: (Optional) timeout for the request :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ - request_pb = _datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction - ) - # Response is empty (i.e. no fields) but we return it anyway. + request_pb = _make_request_pb(request, _datastore_pb2.RollbackRequest) + project_id = request_pb.project_id + return _rpc( self.client._http, project_id, @@ -314,22 +358,29 @@ def rollback(self, project_id, transaction): self.client._client_info, request_pb, _datastore_pb2.RollbackResponse, + retry=retry, + timeout=timeout, ) - def allocate_ids(self, project_id, keys): + def allocate_ids(self, request, retry=None, timeout=None): """Perform an ``allocateIds`` request. 
-        :type project_id: str
-        :param project_id: The project to connect to. This is
-                           usually your project name in the cloud console.
+        :type request: :class:`_datastore_pb2.AllocateIdsRequest` or dict
+        :param request:
+            Parameter bundle for API request.
 
-        :type keys: List[.entity_pb2.Key]
-        :param keys: The keys for which the backend should allocate IDs.
+        :type retry: :class:`google.api_core.retry.Retry`
+        :param retry: (Optional) retry policy for the request
+
+        :type timeout: float or tuple(float, float)
+        :param timeout: (Optional) timeout for the request
 
         :rtype: :class:`.datastore_pb2.AllocateIdsResponse`
         :returns: The returned protobuf response object.
         """
-        request_pb = _datastore_pb2.AllocateIdsRequest(keys=keys)
+        request_pb = _make_request_pb(request, _datastore_pb2.AllocateIdsRequest)
+        project_id = request_pb.project_id
+
         return _rpc(
             self.client._http,
             project_id,
@@ -338,4 +389,37 @@
             self.client._client_info,
             request_pb,
             _datastore_pb2.AllocateIdsResponse,
+            retry=retry,
+            timeout=timeout,
+        )
+
+    def reserve_ids(self, request, retry=None, timeout=None):
+        """Perform a ``reserveIds`` request.
+
+        :type request: :class:`_datastore_pb2.ReserveIdsRequest` or dict
+        :param request:
+            Parameter bundle for API request.
+
+        :type retry: :class:`google.api_core.retry.Retry`
+        :param retry: (Optional) retry policy for the request
+
+        :type timeout: float or tuple(float, float)
+        :param timeout: (Optional) timeout for the request
+
+        :rtype: :class:`.datastore_pb2.ReserveIdsResponse`
+        :returns: The returned protobuf response object.
+        """
+        request_pb = _make_request_pb(request, _datastore_pb2.ReserveIdsRequest)
+        project_id = request_pb.project_id
+
+        return _rpc(
+            self.client._http,
+            project_id,
+            "reserveIds",
+            self.client._base_url,
+            self.client._client_info,
+            request_pb,
+            _datastore_pb2.ReserveIdsResponse,
+            retry=retry,
+            timeout=timeout,
     )
diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py
index 21cdf161e79a..a4bcedd771fb 100644
--- a/packages/google-cloud-datastore/noxfile.py
+++ b/packages/google-cloud-datastore/noxfile.py
@@ -98,7 +98,8 @@ def unit(session):
 
 
 @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
-def system(session):
+@nox.parametrize("disable_grpc", [False, True])
+def system(session, disable_grpc):
     """Run the system test suite."""
     system_test_path = os.path.join("tests", "system.py")
     system_test_folder_path = os.path.join("tests", "system")
@@ -126,11 +127,17 @@ def system(session):
     )
     session.install("-e", ".")
 
+    env = {}
+    if disable_grpc:
+        env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True"
+
     # Run py.test against the system tests.
if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run("py.test", "--quiet", system_test_path, env=env, *session.posargs) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", "--quiet", system_test_folder_path, env=env, *session.posargs + ) @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index 6048d40be401..2e8da9e940e6 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -20,6 +20,82 @@ import requests +class Test__make_retry_timeout_kwargs(unittest.TestCase): + @staticmethod + def _call_fut(retry, timeout): + from google.cloud.datastore._http import _make_retry_timeout_kwargs + + return _make_retry_timeout_kwargs(retry, timeout) + + def test_empty(self): + expected = {} + self.assertEqual(self._call_fut(None, None), expected) + + def test_w_retry(self): + retry = object() + expected = {"retry": retry} + self.assertEqual(self._call_fut(retry, None), expected) + + def test_w_timeout(self): + timeout = 5.0 + expected = {"timeout": timeout} + self.assertEqual(self._call_fut(None, timeout), expected) + + def test_w_retry_w_timeout(self): + retry = object() + timeout = 5.0 + expected = {"retry": retry, "timeout": timeout} + self.assertEqual(self._call_fut(retry, timeout), expected) + + +class Foo: + def __init__(self, bar=None, baz=None): + self.bar = bar + self.baz = baz + + +class Test__make_request_pb(unittest.TestCase): + @staticmethod + def _call_fut(request, request_pb_type): + from google.cloud.datastore._http import _make_request_pb + + return _make_request_pb(request, request_pb_type) + + def test_w_empty_dict(self): + request = {} + + foo = self._call_fut(request, Foo) + + self.assertIsInstance(foo, Foo) + self.assertIsNone(foo.bar) + self.assertIsNone(foo.baz) + + def test_w_partial_dict(self): + request = {"bar": "Bar"} + + foo = self._call_fut(request, Foo) + + self.assertIsInstance(foo, Foo) + self.assertEqual(foo.bar, "Bar") + self.assertIsNone(foo.baz) + + def test_w_complete_dict(self): + request = {"bar": "Bar", "baz": "Baz"} + + foo = self._call_fut(request, Foo) + + self.assertIsInstance(foo, Foo) + self.assertEqual(foo.bar, "Bar") + self.assertEqual(foo.baz, "Baz") + + def test_w_instance(self): + passed = Foo() + + foo = self._call_fut(passed, Foo) + + self.assertIs(foo, passed) + + class Test__request(unittest.TestCase): @staticmethod def _call_fut(*args, **kwargs): @@ -27,7 +103,7 @@ def _call_fut(*args, **kwargs): return _request(*args, **kwargs) - def test_success(self): + def _helper(self, retry=None, timeout=None): from google.cloud import _http as connection_module project = "PROJECT" @@ -40,8 +116,11 @@ def test_success(self): http = _make_requests_session([_make_response(content=response_data)]) - # Call actual function under test. - response = self._call_fut(http, project, method, data, base_url, client_info) + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + response = self._call_fut( + http, project, method, data, base_url, client_info, **kwargs + ) self.assertEqual(response, response_data) # Check that the mocks were called as expected. 
@@ -51,10 +130,30 @@ def test_success(self): "User-Agent": user_agent, connection_module.CLIENT_INFO_HEADER: user_agent, } + + if retry is not None: + retry.assert_called_once_with(http.request) + + kwargs.pop("retry", None) http.request.assert_called_once_with( - method="POST", url=expected_url, headers=expected_headers, data=data + method="POST", + url=expected_url, + headers=expected_headers, + data=data, + **kwargs ) + def test_ok(self): + self._helper() + + def test_w_retry(self): + retry = mock.MagicMock() + self._helper(retry=retry) + + def test_w_timeout(self): + timeout = 5.0 + self._helper(timeout=timeout) + def test_failure(self): from google.cloud.exceptions import BadRequest from google.rpc import code_pb2 @@ -89,7 +188,7 @@ def _call_fut(*args, **kwargs): return _rpc(*args, **kwargs) - def test_it(self): + def _helper(self, retry=None, timeout=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 http = object() @@ -100,6 +199,9 @@ def test_it(self): request_pb = datastore_pb2.BeginTransactionRequest(project_id=project) response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc") + + kwargs = _make_retry_timeout_kwargs(retry, timeout) + patch = mock.patch( "google.cloud.datastore._http._request", return_value=response_pb._pb.SerializeToString(), @@ -113,17 +215,31 @@ def test_it(self): client_info, request_pb, datastore_pb2.BeginTransactionResponse, + **kwargs ) - self.assertEqual(result, response_pb._pb) - mock_request.assert_called_once_with( - http, - project, - method, - request_pb._pb.SerializeToString(), - base_url, - client_info, - ) + self.assertEqual(result, response_pb._pb) + + mock_request.assert_called_once_with( + http, + project, + method, + request_pb._pb.SerializeToString(), + base_url, + client_info, + **kwargs + ) + + def test_defaults(self): + self._helper() + + def test_w_retry(self): + retry = mock.MagicMock() + self._helper(retry=retry) + + def test_w_timeout(self): + timeout = 5.0 + self._helper(timeout=timeout) class TestHTTPDatastoreAPI(unittest.TestCase): @@ -147,52 +263,35 @@ def test_constructor(self): ds_api = self._make_one(client) self.assertIs(ds_api.client, client) - def test_lookup_single_key_empty_response(self): + def _lookup_single_helper( + self, + read_consistency=None, + transaction=None, + empty=True, + retry=None, + timeout=None, + ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 project = "PROJECT" key_pb = _make_key_pb(project) - rsp_pb = datastore_pb2.LookupResponse() - read_options = datastore_pb2.ReadOptions() - - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - # Make request. - ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb], read_options=read_options) + options_kw = {} + if read_consistency is not None: + options_kw["read_consistency"] = read_consistency + if transaction is not None: + options_kw["transaction"] = transaction - # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 0) - self.assertEqual(len(response.missing), 0) - self.assertEqual(len(response.deferred), 0) + read_options = datastore_pb2.ReadOptions(**options_kw) - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb._pb]) - self.assertEqual(request.read_options, read_options._pb) - - def test_lookup_single_key_empty_response_w_eventual(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - key_pb = _make_key_pb(project) rsp_pb = datastore_pb2.LookupResponse() - read_options = datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - ) - # Create mock HTTP and client with response. + if not empty: + entity = entity_pb2.Entity() + entity.key._pb.CopyFrom(key_pb._pb) + rsp_pb._pb.found.add(entity=entity._pb) + http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] ) @@ -203,110 +302,97 @@ def test_lookup_single_key_empty_response_w_eventual(self): _client_info=client_info, spec=["_http", "_base_url", "_client_info"], ) - - # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb], read_options=read_options) + request = { + "project_id": project, + "keys": [key_pb], + "read_options": read_options, + } + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 0) - self.assertEqual(len(response.missing), 0) - self.assertEqual(len(response.deferred), 0) + response = ds_api.lookup(request=request, **kwargs) - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb._pb]) - self.assertEqual(request.read_options, read_options._pb) + self.assertEqual(response, rsp_pb._pb) - def test_lookup_single_key_empty_response_w_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 + if empty: + self.assertEqual(len(response.found), 0) + else: + self.assertEqual(len(response.found), 1) - project = "PROJECT" - transaction = b"TRANSACTION" - key_pb = _make_key_pb(project) - rsp_pb = datastore_pb2.LookupResponse() - read_options = datastore_pb2.ReadOptions(transaction=transaction) + self.assertEqual(len(response.missing), 0) + self.assertEqual(len(response.deferred), 0) - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], + uri = _build_expected_url(client._base_url, project, "lookup") + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, ) - # Make request. - ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb], read_options=read_options) - - # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 0) - self.assertEqual(len(response.missing), 0) - self.assertEqual(len(response.deferred), 0) + if retry is not None: + retry.assert_called_once_with(http.request) - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) self.assertEqual(list(request.keys), [key_pb._pb]) self.assertEqual(request.read_options, read_options._pb) - def test_lookup_single_key_nonempty_response(self): + def test_lookup_single_key_miss(self): + self._lookup_single_helper() + + def test_lookup_single_key_miss_w_read_consistency(self): from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - project = "PROJECT" - key_pb = _make_key_pb(project) - rsp_pb = datastore_pb2.LookupResponse() - entity = entity_pb2.Entity() - entity.key._pb.CopyFrom(key_pb._pb) - rsp_pb._pb.found.add(entity=entity._pb) - read_options = datastore_pb2.ReadOptions() + read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + self._lookup_single_helper(read_consistency=read_consistency) - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) + def test_lookup_single_key_miss_w_transaction(self): + transaction = b"TRANSACTION" + self._lookup_single_helper(transaction=transaction) - # Make request. - ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb], read_options=read_options) + def test_lookup_single_key_hit(self): + self._lookup_single_helper(empty=False) - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 1) - self.assertEqual(len(response.missing), 0) - self.assertEqual(len(response.deferred), 0) - found = response.found[0].entity - self.assertEqual(found.key.path[0].kind, "Kind") - self.assertEqual(found.key.path[0].id, 1234) + def test_lookup_single_key_hit_w_retry(self): + retry = mock.MagicMock() + self._lookup_single_helper(empty=False, retry=retry) - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb._pb]) - self.assertEqual(request.read_options, read_options._pb) + def test_lookup_single_key_hit_w_timeout(self): + timeout = 5.0 + self._lookup_single_helper(empty=False, timeout=timeout) - def test_lookup_multiple_keys_empty_response(self): + def _lookup_multiple_helper( + self, found=0, missing=0, deferred=0, retry=None, timeout=None, + ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 project = "PROJECT" key_pb1 = _make_key_pb(project) key_pb2 = _make_key_pb(project, id_=2345) - rsp_pb = datastore_pb2.LookupResponse() + keys = [key_pb1, key_pb2] read_options = datastore_pb2.ReadOptions() - # Create mock HTTP and client with response. 
+ rsp_pb = datastore_pb2.LookupResponse() + + found_keys = [] + for i_found in range(found): + key = keys[i_found] + found_keys.append(key._pb) + entity = entity_pb2.Entity() + entity.key._pb.CopyFrom(key._pb) + rsp_pb._pb.found.add(entity=entity._pb) + + missing_keys = [] + for i_missing in range(missing): + key = keys[i_missing] + missing_keys.append(key._pb) + entity = entity_pb2.Entity() + entity.key._pb.CopyFrom(key._pb) + rsp_pb._pb.missing.add(entity=entity._pb) + + deferred_keys = [] + for i_deferred in range(deferred): + key = keys[i_deferred] + deferred_keys.append(key._pb) + rsp_pb._pb.deferred.append(key._pb) + http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] ) @@ -317,168 +403,95 @@ def test_lookup_multiple_keys_empty_response(self): _client_info=client_info, spec=["_http", "_base_url", "_client_info"], ) - - # Make request. ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options) - - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 0) - self.assertEqual(len(response.missing), 0) - self.assertEqual(len(response.deferred), 0) + request = { + "project_id": project, + "keys": keys, + "read_options": read_options, + } + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) - self.assertEqual(request.read_options, read_options._pb) + response = ds_api.lookup(request=request, **kwargs) - def test_lookup_multiple_keys_w_missing(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - key_pb1 = _make_key_pb(project) - key_pb2 = _make_key_pb(project, id_=2345) - rsp_pb = datastore_pb2.LookupResponse() - er_1 = rsp_pb._pb.missing.add() - er_1.entity.key.CopyFrom(key_pb1._pb) - er_2 = rsp_pb._pb.missing.add() - er_2.entity.key.CopyFrom(key_pb2._pb) - read_options = datastore_pb2.ReadOptions() + self.assertEqual(response, rsp_pb._pb) - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], + self.assertEqual([found.entity.key for found in response.found], found_keys) + self.assertEqual( + [missing.entity.key for missing in response.missing], missing_keys ) + self.assertEqual(list(response.deferred), deferred_keys) - # Make request. - ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options) - - # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 0) - self.assertEqual(len(response.deferred), 0) - missing_keys = [result.entity.key for result in response.missing] - self.assertEqual(missing_keys, [key_pb1._pb, key_pb2._pb]) - - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + ) self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) self.assertEqual(request.read_options, read_options._pb) - def test_lookup_multiple_keys_w_deferred(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - key_pb1 = _make_key_pb(project) - key_pb2 = _make_key_pb(project, id_=2345) - rsp_pb = datastore_pb2.LookupResponse() - rsp_pb._pb.deferred.add().CopyFrom(key_pb1._pb) - rsp_pb._pb.deferred.add().CopyFrom(key_pb2._pb) - read_options = datastore_pb2.ReadOptions() + def test_lookup_multiple_keys_w_empty_response(self): + self._lookup_multiple_helper() - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) + def test_lookup_multiple_keys_w_retry(self): + retry = mock.MagicMock() + self._lookup_multiple_helper(retry=retry) - # Make request. - ds_api = self._make_one(client) - response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options) + def test_lookup_multiple_keys_w_timeout(self): + timeout = 5.0 + self._lookup_multiple_helper(timeout=timeout) - # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "lookup") - self.assertEqual(len(response.found), 0) - self.assertEqual(len(response.missing), 0) - self.assertEqual(list(response.deferred), [key_pb1._pb, key_pb2._pb]) + def test_lookup_multiple_keys_w_found(self): + self._lookup_multiple_helper(found=2) - request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest()) - self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) - self.assertEqual(request.read_options, read_options._pb) + def test_lookup_multiple_keys_w_missing(self): + self._lookup_multiple_helper(missing=2) - def test_run_query_w_eventual_no_transaction(self): + def test_lookup_multiple_keys_w_deferred(self): + self._lookup_multiple_helper(deferred=2) + + def _run_query_helper( + self, + read_consistency=None, + transaction=None, + namespace=None, + found=0, + retry=None, + timeout=None, + ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore_v1.types import query as query_pb2 project = "PROJECT" kind = "Nonesuch" - cursor = b"\x00" query_pb = self._make_query_pb(kind) - partition_id = entity_pb2.PartitionId(project_id=project) - read_options = datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - ) - rsp_pb = datastore_pb2.RunQueryResponse( - batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.ResultType.FULL, - end_cursor=cursor, - more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, - ) - ) - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - - # Make request. - ds_api = self._make_one(client) - response = ds_api.run_query(project, partition_id, read_options, query=query_pb) + partition_kw = {"project_id": project} + if namespace is not None: + partition_kw["namespace_id"] = namespace - # Check the result and verify the callers. 
- self.assertEqual(response, rsp_pb._pb) - - uri = _build_expected_url(client._base_url, project, "runQuery") - request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id._pb) - self.assertEqual(request.query, query_pb._pb) - self.assertEqual(request.read_options, read_options._pb) + partition_id = entity_pb2.PartitionId(**partition_kw) - def test_run_query_wo_eventual_w_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore_v1.types import query as query_pb2 + options_kw = {} + if read_consistency is not None: + options_kw["read_consistency"] = read_consistency + if transaction is not None: + options_kw["transaction"] = transaction + read_options = datastore_pb2.ReadOptions(**options_kw) - project = "PROJECT" - kind = "Nonesuch" cursor = b"\x00" - transaction = b"TRANSACTION" - query_pb = self._make_query_pb(kind) - partition_id = entity_pb2.PartitionId(project_id=project) - read_options = datastore_pb2.ReadOptions(transaction=transaction) + batch_kw = { + "entity_result_type": query_pb2.EntityResult.ResultType.FULL, + "end_cursor": cursor, + "more_results": query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, + } + if found: + batch_kw["entity_results"] = [ + query_pb2.EntityResult(entity=entity_pb2.Entity()) + ] * found rsp_pb = datastore_pb2.RunQueryResponse( - batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.ResultType.FULL, - end_cursor=cursor, - more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, - ) + batch=query_pb2.QueryResultBatch(**batch_kw) ) - # Create mock HTTP and client with response. http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] ) @@ -489,110 +502,53 @@ def test_run_query_wo_eventual_w_transaction(self): _client_info=client_info, spec=["_http", "_base_url", "_client_info"], ) - - # Make request. ds_api = self._make_one(client) - response = ds_api.run_query(project, partition_id, read_options, query=query_pb) + request = { + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "query": query_pb, + } + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + response = ds_api.run_query(request=request, **kwargs) - # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "runQuery") - request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout, + ) self.assertEqual(request.partition_id, partition_id._pb) self.assertEqual(request.query, query_pb._pb) self.assertEqual(request.read_options, read_options._pb) - def test_run_query_wo_namespace_empty_result(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore_v1.types import query as query_pb2 + def test_run_query_simple(self): + self._run_query_helper() - project = "PROJECT" - kind = "Nonesuch" - cursor = b"\x00" - query_pb = self._make_query_pb(kind) - partition_id = entity_pb2.PartitionId(project_id=project) - read_options = datastore_pb2.ReadOptions() - rsp_pb = datastore_pb2.RunQueryResponse( - batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.ResultType.FULL, - end_cursor=cursor, - more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, - ) - ) + def test_run_query_w_retry(self): + retry = mock.MagicMock() + self._run_query_helper(retry=retry) - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) + def test_run_query_w_timeout(self): + timeout = 5.0 + self._run_query_helper(timeout=timeout) - # Make request. - ds_api = self._make_one(client) - response = ds_api.run_query(project, partition_id, read_options, query=query_pb) + def test_run_query_w_read_consistency(self): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) + read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + self._run_query_helper(read_consistency=read_consistency) - uri = _build_expected_url(client._base_url, project, "runQuery") - request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id._pb) - self.assertEqual(request.query, query_pb._pb) - self.assertEqual(request.read_options, read_options._pb) + def test_run_query_w_transaction(self): + transaction = b"TRANSACTION" + self._run_query_helper(transaction=transaction) def test_run_query_w_namespace_nonempty_result(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore_v1.types import query as query_pb2 - - project = "PROJECT" - kind = "Kind" namespace = "NS" - query_pb = self._make_query_pb(kind) - partition_id = entity_pb2.PartitionId( - project_id=project, namespace_id=namespace - ) - read_options = datastore_pb2.ReadOptions() - rsp_pb = datastore_pb2.RunQueryResponse( - batch=query_pb2.QueryResultBatch( - entity_result_type=query_pb2.EntityResult.ResultType.FULL, - entity_results=[query_pb2.EntityResult(entity=entity_pb2.Entity())], - more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, - ) - ) - - # Create mock HTTP and client with response. 
- http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - - # Make request. - ds_api = self._make_one(client) - response = ds_api.run_query(project, partition_id, read_options, query=query_pb) - - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) - - uri = _build_expected_url(client._base_url, project, "runQuery") - request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest()) - self.assertEqual(request.partition_id, partition_id._pb) - self.assertEqual(request.query, query_pb._pb) + self._run_query_helper(namespace=namespace, found=1) - def test_begin_transaction(self): + def _begin_transaction_helper(self, options=None, retry=None, timeout=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -614,19 +570,46 @@ def test_begin_transaction(self): # Make request. ds_api = self._make_one(client) - response = ds_api.begin_transaction(project) + request = {"project_id": project} + + if options is not None: + request["transaction_options"] = options + + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + response = ds_api.begin_transaction(request=request, **kwargs) # Check the result and verify the callers. self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "beginTransaction") request = _verify_protobuf_call( - http, uri, datastore_pb2.BeginTransactionRequest() + http, + uri, + datastore_pb2.BeginTransactionRequest(), + retry=retry, + timeout=timeout, ) - # The RPC-over-HTTP request does not set the project in the request. - self.assertEqual(request.project_id, u"") - def test_commit_wo_transaction(self): + def test_begin_transaction_wo_options(self): + self._begin_transaction_helper() + + def test_begin_transaction_w_options(self): + from google.cloud.datastore_v1.types import TransactionOptions + + read_only = TransactionOptions.ReadOnly._meta.pb() + options = TransactionOptions(read_only=read_only) + self._begin_transaction_helper(options=options) + + def test_begin_transaction_w_retry(self): + retry = mock.MagicMock() + self._begin_transaction_helper(retry=retry) + + def test_begin_transaction_w_timeout(self): + timeout = 5.0 + self._begin_transaction_helper(timeout=timeout) + + def _commit_helper(self, transaction=None, retry=None, timeout=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb @@ -640,7 +623,6 @@ def test_commit_wo_transaction(self): value_pb = _new_value_pb(insert, "foo") value_pb.string_value = u"Foo" - # Create mock HTTP and client with response. http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] ) @@ -652,63 +634,52 @@ def test_commit_wo_transaction(self): spec=["_http", "_base_url", "_client_info"], ) - # Make request. rq_class = datastore_pb2.CommitRequest ds_api = self._make_one(client) - mode = rq_class.Mode.NON_TRANSACTIONAL - result = ds_api.commit(project, mode, [mutation]) - # Check the result and verify the callers. 
+ request = {"project_id": project, "mutations": [mutation]} + + if transaction is not None: + request["transaction"] = transaction + mode = request["mode"] = rq_class.Mode.TRANSACTIONAL + else: + mode = request["mode"] = rq_class.Mode.NON_TRANSACTIONAL + + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + result = ds_api.commit(request=request, **kwargs) + self.assertEqual(result, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "commit") - request = _verify_protobuf_call(http, uri, rq_class()) - self.assertEqual(request.transaction, b"") + request = _verify_protobuf_call( + http, uri, rq_class(), retry=retry, timeout=timeout, + ) self.assertEqual(list(request.mutations), [mutation]) - self.assertEqual(request.mode, rq_class.Mode.NON_TRANSACTIONAL) + self.assertEqual(request.mode, mode) - def test_commit_w_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.helpers import _new_value_pb + if transaction is not None: + self.assertEqual(request.transaction, transaction) + else: + self.assertEqual(request.transaction, b"") - project = "PROJECT" - key_pb = _make_key_pb(project) - rsp_pb = datastore_pb2.CommitResponse() - req_pb = datastore_pb2.CommitRequest() - mutation = req_pb._pb.mutations.add() - insert = mutation.upsert - insert.key.CopyFrom(key_pb._pb) - value_pb = _new_value_pb(insert, "foo") - value_pb.string_value = u"Foo" + def test_commit_wo_transaction(self): + self._commit_helper() - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) + def test_commit_w_transaction(self): + transaction = b"xact" - # Make request. - rq_class = datastore_pb2.CommitRequest - ds_api = self._make_one(client) - mode = rq_class.Mode.TRANSACTIONAL - result = ds_api.commit(project, mode, [mutation], transaction=b"xact") + self._commit_helper(transaction=transaction) - # Check the result and verify the callers. - self.assertEqual(result, rsp_pb._pb) + def test_commit_w_retry(self): + retry = mock.MagicMock() + self._commit_helper(retry=retry) - uri = _build_expected_url(client._base_url, project, "commit") - request = _verify_protobuf_call(http, uri, rq_class()) - self.assertEqual(request.transaction, b"xact") - self.assertEqual(list(request.mutations), [mutation]) - self.assertEqual(request.mode, rq_class.Mode.TRANSACTIONAL) + def test_commit_w_timeout(self): + timeout = 5.0 + self._commit_helper(timeout=timeout) - def test_rollback_ok(self): + def _rollback_helper(self, retry=None, timeout=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -729,22 +700,46 @@ def test_rollback_ok(self): # Make request. ds_api = self._make_one(client) - response = ds_api.rollback(project, transaction) + request = {"project_id": project, "transaction": transaction} + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + response = ds_api.rollback(request=request, **kwargs) # Check the result and verify the callers. 
self.assertEqual(response, rsp_pb._pb) uri = _build_expected_url(client._base_url, project, "rollback") - request = _verify_protobuf_call(http, uri, datastore_pb2.RollbackRequest()) + request = _verify_protobuf_call( + http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout, + ) self.assertEqual(request.transaction, transaction) - def test_allocate_ids_empty(self): + def test_rollback_ok(self): + self._rollback_helper() + + def test_rollback_w_retry(self): + retry = mock.MagicMock() + self._rollback_helper(retry=retry) + + def test_rollback_w_timeout(self): + timeout = 5.0 + self._rollback_helper(timeout=timeout) + + def _allocate_ids_helper(self, count=0, retry=None, timeout=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" + before_key_pbs = [] + after_key_pbs = [] rsp_pb = datastore_pb2.AllocateIdsResponse() - # Create mock HTTP and client with response. + for i_count in range(count): + requested = _make_key_pb(project, id_=None) + before_key_pbs.append(requested) + allocated = _make_key_pb(project, id_=i_count) + after_key_pbs.append(allocated) + rsp_pb._pb.keys.add().CopyFrom(allocated._pb) + http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] ) @@ -755,33 +750,49 @@ def test_allocate_ids_empty(self): _client_info=client_info, spec=["_http", "_base_url", "_client_info"], ) - - # Make request. ds_api = self._make_one(client) - response = ds_api.allocate_ids(project, []) - # Check the result and verify the callers. + request = {"project_id": project, "keys": before_key_pbs} + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + response = ds_api.allocate_ids(request=request, **kwargs) + self.assertEqual(response, rsp_pb._pb) - self.assertEqual(list(response.keys), []) + self.assertEqual(list(response.keys), [i._pb for i in after_key_pbs]) uri = _build_expected_url(client._base_url, project, "allocateIds") - request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest()) - self.assertEqual(list(request.keys), []) + request = _verify_protobuf_call( + http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + ) + self.assertEqual(len(request.keys), len(before_key_pbs)) + for key_before, key_after in zip(before_key_pbs, request.keys): + self.assertEqual(key_before, key_after) + + def test_allocate_ids_empty(self): + self._allocate_ids_helper() def test_allocate_ids_non_empty(self): + self._allocate_ids_helper(count=2) + + def test_allocate_ids_w_retry(self): + retry = mock.MagicMock() + self._allocate_ids_helper(retry=retry) + + def test_allocate_ids_w_timeout(self): + timeout = 5.0 + self._allocate_ids_helper(timeout=timeout) + + def _reserve_ids_helper(self, count=0, retry=None, timeout=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" - before_key_pbs = [ - _make_key_pb(project, id_=None), - _make_key_pb(project, id_=None), - ] - after_key_pbs = [_make_key_pb(project), _make_key_pb(project, id_=2345)] - rsp_pb = datastore_pb2.AllocateIdsResponse() - rsp_pb._pb.keys.add().CopyFrom(after_key_pbs[0]._pb) - rsp_pb._pb.keys.add().CopyFrom(after_key_pbs[1]._pb) + before_key_pbs = [] + rsp_pb = datastore_pb2.ReserveIdsResponse() + + for i_count in range(count): + requested = _make_key_pb(project, id_=i_count) + before_key_pbs.append(requested) - # Create mock HTTP and client with response. 
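[Editor's note] These consolidated helpers thread optional `retry`/`timeout` arguments through every call. The non-obvious wiring is the `retry` mock: `_make_retry_timeout_kwargs` points `retry.return_value` back at `http.request`, so the wrapped call still reaches the mock session while `retry.assert_called_once_with(http.request)` verifies that the requester was wrapped. A minimal, self-contained sketch of that pattern; `call_with_optional_retry` is an illustrative stand-in, not the library's actual plumbing:

    from unittest import mock

    def call_with_optional_retry(requester, retry=None):
        # Stand-in for the wrapped HTTP call: when a retry decorator is
        # supplied, it wraps the requester and returns the callable to use.
        if retry is not None:
            requester = retry(requester)
        return requester("POST")

    requester = mock.Mock(return_value="response")
    retry = mock.MagicMock()
    retry.return_value = requester  # let the wrapped call fall through

    assert call_with_optional_retry(requester, retry=retry) == "response"
    retry.assert_called_once_with(requester)  # ...while recording the wrapping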
http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] ) @@ -792,21 +803,37 @@ def test_allocate_ids_non_empty(self): _client_info=client_info, spec=["_http", "_base_url", "_client_info"], ) - - # Make request. ds_api = self._make_one(client) - response = ds_api.allocate_ids(project, before_key_pbs) - # Check the result and verify the callers. - self.assertEqual(list(response.keys), [i._pb for i in after_key_pbs]) + request = {"project_id": project, "keys": before_key_pbs} + kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + + response = ds_api.reserve_ids(request=request, **kwargs) + self.assertEqual(response, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "allocateIds") - request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest()) + uri = _build_expected_url(client._base_url, project, "reserveIds") + request = _verify_protobuf_call( + http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + ) self.assertEqual(len(request.keys), len(before_key_pbs)) for key_before, key_after in zip(before_key_pbs, request.keys): self.assertEqual(key_before, key_after) + def test_reserve_ids_empty(self): + self._reserve_ids_helper() + + def test_reserve_ids_non_empty(self): + self._reserve_ids_helper(count=2) + + def test_reserve_ids_w_retry(self): + retry = mock.MagicMock() + self._reserve_ids_helper(retry=retry) + + def test_reserve_ids_w_timeout(self): + timeout = 5.0 + self._reserve_ids_helper(timeout=timeout) + def _make_response(status=client.OK, content=b"", headers={}): response = requests.Response() @@ -849,7 +876,7 @@ def _make_client_info(user_agent=_USER_AGENT): return client_info -def _verify_protobuf_call(http, expected_url, pb): +def _verify_protobuf_call(http, expected_url, pb, retry=None, timeout=None): from google.cloud import _http as connection_module expected_headers = { @@ -858,10 +885,36 @@ def _verify_protobuf_call(http, expected_url, pb): connection_module.CLIENT_INFO_HEADER: _USER_AGENT, } - http.request.assert_called_once_with( - method="POST", url=expected_url, headers=expected_headers, data=mock.ANY - ) + if retry is not None: + retry.assert_called_once_with(http.request) + + if timeout is not None: + http.request.assert_called_once_with( + method="POST", + url=expected_url, + headers=expected_headers, + data=mock.ANY, + timeout=timeout, + ) + else: + http.request.assert_called_once_with( + method="POST", url=expected_url, headers=expected_headers, data=mock.ANY + ) data = http.request.mock_calls[0][2]["data"] pb._pb.ParseFromString(data) return pb + + +def _make_retry_timeout_kwargs(retry, timeout, http=None): + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + if http is not None: + retry.return_value = http.request + + if timeout is not None: + kwargs["timeout"] = timeout + + return kwargs From 3cd5b716e202149d23a03181fa19440cd6cf334f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Sat, 12 Dec 2020 14:32:04 -0500 Subject: [PATCH 313/611] tests: enable unit tests for Python 3.9 (#139) --- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/synth.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index a4bcedd771fb..1737a22d3b26 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] 
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 5aa94d2505b7..96709221e08c 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -79,7 +79,7 @@ templated_files = common.py_library( unit_cov_level=97, cov_level=97, - unit_test_python_versions=["3.6", "3.7", "3.8"], + unit_test_python_versions=["3.6", "3.7", "3.8", "3.9"], system_test_python_versions=["3.8"], ) s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) From 70196a5de5963b9f9ff19a1566fd378c0849dca1 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Wed, 3 Feb 2021 15:11:42 -0800 Subject: [PATCH 314/611] build: migrate to flakybot (#149) --- packages/google-cloud-datastore/.kokoro/test-samples.sh | 8 ++++---- packages/google-cloud-datastore/.kokoro/trampoline_v2.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh index c4163d795804..4971100ddf77 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
-      chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
-      $KOKORO_GFILE_DIR/linux_amd64/buildcop
+      chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+      $KOKORO_GFILE_DIR/linux_amd64/flakybot
    fi

    if [[ $EXIT -ne 0 ]]; then
diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh
index 719bcd5ba84d..4af6cdc26dbc 100755
--- a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh
+++ b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh
@@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
     "KOKORO_GITHUB_COMMIT"
     "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
     "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
-    # For Build Cop Bot
+    # For FlakyBot
     "KOKORO_GITHUB_COMMIT_URL"
     "KOKORO_GITHUB_PULL_REQUEST_URL"
 )
From 296fa606cf452787eebdf64577750c29ea0a94cb Mon Sep 17 00:00:00 2001
From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com>
Date: Wed, 3 Feb 2021 17:19:31 -0700
Subject: [PATCH 315/611] chore: add constraints file (#144)

* chore: add constraints file

* chore: add constraints file

* chore: add constraints file

* chore: add constraints file

* chore: add constraints file

* chore: add constraints file

Co-authored-by: Christopher Wilcox
---
 .../testing/constraints-3.10.txt | 0
 .../testing/constraints-3.11.txt | 0
 .../testing/constraints-3.6.txt | 11 +++++++++++
 .../testing/constraints-3.7.txt | 0
 .../testing/constraints-3.8.txt | 0
 .../testing/constraints-3.9.txt | 0
 6 files changed, 11 insertions(+)
 create mode 100644 packages/google-cloud-datastore/testing/constraints-3.10.txt
 create mode 100644 packages/google-cloud-datastore/testing/constraints-3.11.txt
 create mode 100644 packages/google-cloud-datastore/testing/constraints-3.6.txt
 create mode 100644 packages/google-cloud-datastore/testing/constraints-3.7.txt
 create mode 100644 packages/google-cloud-datastore/testing/constraints-3.8.txt
 create mode 100644 packages/google-cloud-datastore/testing/constraints-3.9.txt

diff --git a/packages/google-cloud-datastore/testing/constraints-3.10.txt b/packages/google-cloud-datastore/testing/constraints-3.10.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-datastore/testing/constraints-3.11.txt b/packages/google-cloud-datastore/testing/constraints-3.11.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-datastore/testing/constraints-3.6.txt b/packages/google-cloud-datastore/testing/constraints-3.6.txt
new file mode 100644
index 000000000000..20a821c2d75d
--- /dev/null
+++ b/packages/google-cloud-datastore/testing/constraints-3.6.txt
@@ -0,0 +1,11 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have foo==1.14.0
+google-api-core==1.22.2
+google-cloud-core==1.4.0
+proto-plus==1.4.0
+libcst==0.2.5
\ No newline at end of file
diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-datastore/testing/constraints-3.8.txt b/packages/google-cloud-datastore/testing/constraints-3.8.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-datastore/testing/constraints-3.9.txt b/packages/google-cloud-datastore/testing/constraints-3.9.txt
new file mode 100644
index 000000000000..e69de29bb2d1
From c112cbfcfb70ce27c08d0f4018f613395ee48593 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 20 Apr 2021 15:01:16 -0400
Subject: [PATCH 316/611] chore: add yoshi-python to CODEOWNERS (#151)

---
 packages/google-cloud-datastore/.github/CODEOWNERS | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS
index 39a8fc72bc4f..6da5f48a6fbf 100644
--- a/packages/google-cloud-datastore/.github/CODEOWNERS
+++ b/packages/google-cloud-datastore/.github/CODEOWNERS
@@ -7,4 +7,4 @@
 # The firestore-dpe team is the default owner for anything not
 # explicitly taken by someone else.
-* @googleapis/firestore-dpe
+* @googleapis/firestore-dpe @googleapis/yoshi-python
From 857424d042b17365893b3346fbad6cb691352639 Mon Sep 17 00:00:00 2001
From: Craig Labenz
Date: Tue, 20 Apr 2021 13:57:02 -0700
Subject: [PATCH 317/611] fix: optimized protobuf access for performance (#155)

More efficiently uses proto-plus wrappers, as well as inner protobuf
attribute access, to greatly reduce the performance costs seen in
version 2.0.0 (which stemmed from the introduction of proto-plus).

The size of the performance improvement scales with the number of
attributes on each Entity, but in general, speeds once again closely
approximate those from 1.15.

Fixes #145
Fixes #150
---
 .../google/cloud/datastore/helpers.py | 43 +++++++++++--------
 .../tests/unit/test_helpers.py | 9 ++++
 2 files changed, 33 insertions(+), 19 deletions(-)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
index eada5f4f1db8..c1d022e32c6d 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py
@@ -49,16 +49,21 @@ def _get_meaning(value_pb, is_list=False):
     """
     meaning = None
     if is_list:
+
+        values = (
+            value_pb._pb.array_value.values
+            if hasattr(value_pb, "_pb")
+            else value_pb.array_value.values
+        )
+
         # An empty list will have no values, hence no shared meaning
         # set among them.
-        if len(value_pb.array_value.values) == 0:
+        if len(values) == 0:
             return None

         # We check among all the meanings, some of which may be None,
         # the rest which may be enum/int values.
-        all_meanings = [
-            _get_meaning(sub_value_pb) for sub_value_pb in value_pb.array_value.values
-        ]
+        all_meanings = [_get_meaning(sub_value_pb) for sub_value_pb in values]
         unique_meanings = set(all_meanings)
         if len(unique_meanings) == 1:
             # If there is a unique meaning, we preserve it.
@@ -119,11 +124,8 @@ def entity_from_protobuf(pb):
     :rtype: :class:`google.cloud.datastore.entity.Entity`
     :returns: The entity derived from the protobuf.
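[Editor's note] In rough sketch form, the distinction this commit exploits: hot paths drop from the proto-plus wrapper to the raw protobuf via `._pb`, while `timestamp_value` keeps going through the wrapper so callers still receive `DatetimeWithNanoseconds` objects (see the comment added in the hunk below). This assumes google-cloud-datastore 2.x; the example value is illustrative:

    from google.cloud.datastore_v1.types import entity as entity_pb2

    value = entity_pb2.Value(string_value="hello")

    # Reading through the proto-plus wrapper marshals the value on
    # every access ...
    assert value.string_value == "hello"

    # ... while ._pb reaches the underlying protobuf message directly,
    # the cheaper path this commit prefers for scalar fields.
    assert value._pb.string_value == "hello"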
""" - - if not getattr(pb, "_pb", False): - # Coerce raw pb type into proto-plus pythonic type. - proto_pb = entity_pb2.Entity(pb) - pb = pb + if not isinstance(pb, entity_pb2.Entity): + proto_pb = entity_pb2.Entity.wrap(pb) else: proto_pb = pb pb = pb._pb @@ -152,7 +154,7 @@ def entity_from_protobuf(pb): if is_list and len(value) > 0: exclude_values = set( value_pb.exclude_from_indexes - for value_pb in value_pb.array_value.values + for value_pb in value_pb._pb.array_value.values ) if len(exclude_values) != 1: raise ValueError( @@ -402,33 +404,36 @@ def _get_value_from_value_pb(value): """ if not getattr(value, "_pb", False): # Coerce raw pb type into proto-plus pythonic type. - value = entity_pb2.Value(value) + value = entity_pb2.Value.wrap(value) value_type = value._pb.WhichOneof("value_type") if value_type == "timestamp_value": + # Do not access `._pb` here, as that returns a Timestamp proto, + # but this should return a Pythonic `DatetimeWithNanoseconds` value, + # which is found at `value.timestamp_value` result = value.timestamp_value elif value_type == "key_value": - result = key_from_protobuf(value.key_value) + result = key_from_protobuf(value._pb.key_value) elif value_type == "boolean_value": - result = value.boolean_value + result = value._pb.boolean_value elif value_type == "double_value": - result = value.double_value + result = value._pb.double_value elif value_type == "integer_value": - result = value.integer_value + result = value._pb.integer_value elif value_type == "string_value": - result = value.string_value + result = value._pb.string_value elif value_type == "blob_value": - result = value.blob_value + result = value._pb.blob_value elif value_type == "entity_value": - result = entity_from_protobuf(value.entity_value) + result = entity_from_protobuf(value._pb.entity_value) elif value_type == "array_value": result = [ @@ -437,7 +442,7 @@ def _get_value_from_value_pb(value): elif value_type == "geo_point_value": result = GeoPoint( - value.geo_point_value.latitude, value.geo_point_value.longitude, + value._pb.geo_point_value.latitude, value._pb.geo_point_value.longitude, ) elif value_type == "null_value": diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 81cae0f3bdf1..5b602cffbb00 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -140,6 +140,15 @@ def test_entity_no_key(self): self.assertIsNone(entity.key) self.assertEqual(dict(entity), {}) + def test_pb2_entity_no_key(self): + from google.cloud.datastore_v1.types import entity as entity_pb2 + + entity_pb = entity_pb2.Entity() + entity = self._call_fut(entity_pb) + + self.assertIsNone(entity.key) + self.assertEqual(dict(entity), {}) + def test_entity_with_meaning(self): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.helpers import _new_value_pb From 0845511b8ec143c68dd38505da758745e784ff41 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Apr 2021 14:14:42 -0700 Subject: [PATCH 318/611] chore: release 2.1.1 (#137) * chore: release 2.1.1 * chore: normalize imperative voice in changelog entry Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Tres Seaver --- packages/google-cloud-datastore/CHANGELOG.md | 8 ++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 
9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 28948fd6eb45..e5535af0aad5 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.1.1](https://www.github.com/googleapis/python-datastore/compare/v2.1.0...v2.1.1) (2021-04-20) + + +### Bug Fixes + +* make HTTPDatastoreAPI compatible w/ microgen Gapic API ([#136](https://www.github.com/googleapis/python-datastore/issues/136)) ([d522799](https://www.github.com/googleapis/python-datastore/commit/d5227994a4a5e2300905d6619742664dcd909443)) +* optimize protobuf access for performance ([#155](https://www.github.com/googleapis/python-datastore/issues/155)) ([5b67daa](https://www.github.com/googleapis/python-datastore/commit/5b67daa3b2da1f0b5dd5b25e14bd5dee4444120b)), closes [#145](https://www.github.com/googleapis/python-datastore/issues/145) [#150](https://www.github.com/googleapis/python-datastore/issues/150) + ## [2.1.0](https://www.github.com/googleapis/python-datastore/compare/v2.0.1...v2.1.0) (2020-12-04) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 8b5d3328c28b..7945f6f4f462 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.1.0" +__version__ = "2.1.1" From c1832d5fa9cdd1c55261294e3bb53603a56dd8a6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 21 Apr 2021 19:05:56 -0400 Subject: [PATCH 319/611] chore: preserve gRPC-disabled systests during synth (#141) See #133, #136 --- packages/google-cloud-datastore/synth.py | 50 ++++++++++++++++++++---- 1 file changed, 43 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 96709221e08c..818a9e80b1c8 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -86,17 +86,53 @@ s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') -# Add the `sphinx-ext-doctest` extenaion +# Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) s.replace( - "docs/conf.py", + "noxfile.py", """\ - "sphinx.ext.coverage", - "sphinx.ext.napoleon", +@nox.session\(python=SYSTEM_TEST_PYTHON_VERSIONS\) +def system\(session\): """, """\ - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +@nox.parametrize("disable_grpc", [False, True]) +def system(session, disable_grpc): +""", +) + +s.replace( + "noxfile.py", + """\ + # Run py.test against the system tests. +""", + """\ + env = {} + if disable_grpc: + env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True" + + # Run py.test against the system tests. 
+""", +) + +s.replace( + "noxfile.py", + """\ + session.run\("py.test", "--quiet", system_test_path, \*session.posargs\) +""", + """\ + session.run("py.test", "--quiet", system_test_path, env=env, *session.posargs) +""", +) + +s.replace( + "noxfile.py", + """\ + session.run\("py.test", "--quiet", system_test_folder_path, \*session.posargs\) +""", + """\ + session.run( + "py.test", "--quiet", system_test_folder_path, env=env, *session.posargs + ) """, ) From b2a583f82ed8deefef379d642f661fed451026a7 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 28 Apr 2021 11:56:09 -0400 Subject: [PATCH 320/611] chore: use `gcp-sphinx-docfx-yaml` (#161) makes use of the updated plugin for generating DocFX YAMLs --- packages/google-cloud-datastore/noxfile.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 1737a22d3b26..a6c9bf64193f 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -180,9 +180,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( From 7b586cf0c4ffb23a40e1a4b2de32b9dfaab7de9c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 3 May 2021 08:54:04 -0700 Subject: [PATCH 321/611] chore: switch from flakybot to buildcop (via synth) (#159) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/fdb52ec7-8162-4605-a0be-a825994accd2/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) --- packages/google-cloud-datastore/.kokoro/test-samples.sh | 8 ++++---- packages/google-cloud-datastore/.kokoro/trampoline_v2.sh | 2 +- packages/google-cloud-datastore/synth.metadata | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh index 4971100ddf77..c4163d795804 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop fi if [[ $EXIT -ne 0 ]]; then diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..719bcd5ba84d 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot + # For Build Cop Bot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index a8bf8ba9639d..adc5da109eee 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "dd6c0ee43fe6763528374a3494de0124de0ee652" + "sha": "3c8da54de9a6837d6d96b6befd3b0ef084d8ff7f" } }, { From b7caf112a43703e6684aaf5520fb2cb511970213 Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Mon, 3 May 2021 16:06:03 +0000 Subject: [PATCH 322/611] chore: add SECURITY.md (#162) chore: add SECURITY.md --- packages/google-cloud-datastore/SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/google-cloud-datastore/SECURITY.md diff --git a/packages/google-cloud-datastore/SECURITY.md b/packages/google-cloud-datastore/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-datastore/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. From 6379fceacbe4c4a2cc05ec8b0313712faa8faaa1 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 3 May 2021 09:18:04 -0700 Subject: [PATCH 323/611] docs: update intersphinx URLs for grpc and auth (#93) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/78f53313-0c78-4a29-8841-f031665a4c6a/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/a073c873f3928c561bdf87fdfbf1d081d1998984 Source-Link: https://github.com/googleapis/synthtool/commit/9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9 --- packages/google-cloud-datastore/docs/conf.py | 6 +++--- packages/google-cloud-datastore/synth.metadata | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 2aff2fc7a9d1..46cba6cafd16 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -345,10 +345,10 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), - "grpc": ("https://grpc.io/grpc/python/", None), + "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata index adc5da109eee..c8b752aec41f 100644 --- a/packages/google-cloud-datastore/synth.metadata +++ b/packages/google-cloud-datastore/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b19b401571e77192f8dd38eab5fb2300a0de9324" + "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" } } ], From 85a201cafc9eb012e0f46609415d906afaec99d9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 3 May 2021 12:30:09 -0400 Subject: [PATCH 324/611] fix: pass transaction's options to API in 'begin' (#143) Closes #135. --- .../google/cloud/datastore/transaction.py | 8 +- .../tests/unit/test_transaction.py | 135 ++++++++++++------ 2 files changed, 96 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index a1eabed53b02..21cac1a717a8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -176,10 +176,12 @@ def Entity(*args, **kwargs): def __init__(self, client, read_only=False): super(Transaction, self).__init__(client) self._id = None + if read_only: options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) else: options = TransactionOptions() + self._options = options @property @@ -231,9 +233,13 @@ def begin(self, retry=None, timeout=None): kwargs = _make_retry_timeout_kwargs(retry, timeout) + request = { + "project_id": self.project, + "transaction_options": self._options, + } try: response_pb = self._client._datastore_api.begin_transaction( - request={"project_id": self.project}, **kwargs + request=request, **kwargs ) self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 1bc355cc7339..bae419dfdcd8 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -24,21 +24,25 @@ def _get_target_class(): return Transaction - def _get_options_class(self, **kw): + def _make_one(self, client, **kw): + return self._get_target_class()(client, **kw) + + def _make_options(self, read_only=False, previous_transaction=None): from google.cloud.datastore_v1.types import TransactionOptions - return TransactionOptions + kw = {} - def _make_one(self, client, **kw): - return self._get_target_class()(client, **kw) + if read_only: + kw["read_only"] = TransactionOptions.ReadOnly() - def _make_options(self, **kw): - return self._get_options_class()(**kw) + return TransactionOptions(**kw) def test_ctor_defaults(self): project = "PROJECT" client = 
_Client(project) + xact = self._make_one(client) + self.assertEqual(xact.project, project) self.assertIs(xact._client, client) self.assertIsNone(xact.id) @@ -46,6 +50,24 @@ def test_ctor_defaults(self): self.assertEqual(xact._mutations, []) self.assertEqual(len(xact._partial_key_entities), 0) + def test_constructor_read_only(self): + project = "PROJECT" + id_ = 850302 + ds_api = _make_datastore_api(xact=id_) + client = _Client(project, datastore_api=ds_api) + options = self._make_options(read_only=True) + + xact = self._make_one(client, read_only=True) + + self.assertEqual(xact._options, options) + + def _make_begin_request(self, project, read_only=False): + expected_options = self._make_options(read_only=read_only) + return { + "project_id": project, + "transaction_options": expected_options, + } + def test_current(self): from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -57,24 +79,34 @@ def test_current(self): xact2 = self._make_one(client) self.assertIsNone(xact1.current()) self.assertIsNone(xact2.current()) + with xact1: self.assertIs(xact1.current(), xact1) self.assertIs(xact2.current(), xact1) + with _NoCommitBatch(client): self.assertIsNone(xact1.current()) self.assertIsNone(xact2.current()) + with xact2: self.assertIs(xact1.current(), xact2) self.assertIs(xact2.current(), xact2) + with _NoCommitBatch(client): self.assertIsNone(xact1.current()) self.assertIsNone(xact2.current()) + self.assertIs(xact1.current(), xact1) self.assertIs(xact2.current(), xact1) + self.assertIsNone(xact1.current()) self.assertIsNone(xact2.current()) - ds_api.rollback.assert_not_called() + begin_txn = ds_api.begin_transaction + self.assertEqual(begin_txn.call_count, 2) + expected_request = self._make_begin_request(project) + begin_txn.assert_called_with(request=expected_request) + commit_method = ds_api.commit self.assertEqual(commit_method.call_count, 2) mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL @@ -87,9 +119,7 @@ def test_current(self): } ) - begin_txn = ds_api.begin_transaction - self.assertEqual(begin_txn.call_count, 2) - begin_txn.assert_called_with(request={"project_id": project}) + ds_api.rollback.assert_not_called() def test_begin(self): project = "PROJECT" @@ -97,11 +127,27 @@ def test_begin(self): ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) xact = self._make_one(client) + xact.begin() + self.assertEqual(xact.id, id_) - ds_api.begin_transaction.assert_called_once_with( - request={"project_id": project} - ) + + expected_request = self._make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) + + def test_begin_w_readonly(self): + project = "PROJECT" + id_ = 889 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = self._make_one(client, read_only=True) + + xact.begin() + + self.assertEqual(xact.id, id_) + + expected_request = self._make_begin_request(project, read_only=True) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) def test_begin_w_retry_w_timeout(self): project = "PROJECT" @@ -116,8 +162,10 @@ def test_begin_w_retry_w_timeout(self): xact.begin(retry=retry, timeout=timeout) self.assertEqual(xact.id, id_) + + expected_request = self._make_begin_request(project) ds_api.begin_transaction.assert_called_once_with( - request={"project_id": project}, retry=retry, timeout=timeout + request=expected_request, retry=retry, timeout=timeout, ) def test_begin_tombstoned(self): @@ -126,19 +174,23 @@ def 
test_begin_tombstoned(self): ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) xact = self._make_one(client) + xact.begin() + self.assertEqual(xact.id, id_) - ds_api.begin_transaction.assert_called_once_with( - request={"project_id": project} - ) + + expected_request = self._make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) xact.rollback() + client._datastore_api.rollback.assert_called_once_with( request={"project_id": project, "transaction": id_} ) self.assertIsNone(xact.id) - self.assertRaises(ValueError, xact.begin) + with self.assertRaises(ValueError): + xact.begin() def test_begin_w_begin_transaction_failure(self): project = "PROJECT" @@ -152,9 +204,9 @@ def test_begin_w_begin_transaction_failure(self): xact.begin() self.assertIsNone(xact.id) - ds_api.begin_transaction.assert_called_once_with( - request={"project_id": project} - ) + + expected_request = self._make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) def test_rollback(self): project = "PROJECT" @@ -256,11 +308,14 @@ def test_context_manager_no_raise(self): ds_api = _make_datastore_api(xact_id=id_) client = _Client(project, datastore_api=ds_api) xact = self._make_one(client) + with xact: - self.assertEqual(xact.id, id_) - ds_api.begin_transaction.assert_called_once_with( - request={"project_id": project} - ) + self.assertEqual(xact.id, id_) # only set between begin / commit + + self.assertIsNone(xact.id) + + expected_request = self._make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL client._datastore_api.commit.assert_called_once_with( @@ -272,9 +327,6 @@ def test_context_manager_no_raise(self): }, ) - self.assertIsNone(xact.id) - self.assertEqual(ds_api.begin_transaction.call_count, 1) - def test_context_manager_w_raise(self): class Foo(Exception): pass @@ -288,29 +340,20 @@ class Foo(Exception): try: with xact: self.assertEqual(xact.id, id_) - ds_api.begin_transaction.assert_called_once_with( - request={"project_id": project} - ) raise Foo() except Foo: - self.assertIsNone(xact.id) - client._datastore_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) + pass - client._datastore_api.commit.assert_not_called() self.assertIsNone(xact.id) - self.assertEqual(ds_api.begin_transaction.call_count, 1) - def test_constructor_read_only(self): - project = "PROJECT" - id_ = 850302 - ds_api = _make_datastore_api(xact=id_) - client = _Client(project, datastore_api=ds_api) - read_only = self._get_options_class().ReadOnly() - options = self._make_options(read_only=read_only) - xact = self._make_one(client, read_only=True) - self.assertEqual(xact._options, options) + expected_request = self._make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) + + client._datastore_api.commit.assert_not_called() + + client._datastore_api.rollback.assert_called_once_with( + request={"project_id": project, "transaction": id_} + ) def test_put_read_only(self): project = "PROJECT" From 998b46b197c8054c45babb2937e35f2361a79cba Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 May 2021 16:40:06 +0000 Subject: [PATCH 325/611] chore: release 2.1.2 (#164) :robot: I have created a release \*beep\* \*boop\* --- ### 
[2.1.2](https://www.github.com/googleapis/python-datastore/compare/v2.1.1...v2.1.2) (2021-05-03) ### Bug Fixes * pass transaction's options to API in 'begin' ([#143](https://www.github.com/googleapis/python-datastore/issues/143)) ([924b10b](https://www.github.com/googleapis/python-datastore/commit/924b10b11eb7ff52367f388cf5c8e16aa9b2e32e)) ### Documentation * update intersphinx URLs for grpc and auth ([#93](https://www.github.com/googleapis/python-datastore/issues/93)) ([4f90d04](https://www.github.com/googleapis/python-datastore/commit/4f90d04c81aacdbaf83f5a9dc996898fa9c7ba26)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index e5535af0aad5..df56b8e534b2 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.1.2](https://www.github.com/googleapis/python-datastore/compare/v2.1.1...v2.1.2) (2021-05-03) + + +### Bug Fixes + +* pass transaction's options to API in 'begin' ([#143](https://www.github.com/googleapis/python-datastore/issues/143)) ([924b10b](https://www.github.com/googleapis/python-datastore/commit/924b10b11eb7ff52367f388cf5c8e16aa9b2e32e)) + + +### Documentation + +* update intersphinx URLs for grpc and auth ([#93](https://www.github.com/googleapis/python-datastore/issues/93)) ([4f90d04](https://www.github.com/googleapis/python-datastore/commit/4f90d04c81aacdbaf83f5a9dc996898fa9c7ba26)) + ### [2.1.1](https://www.github.com/googleapis/python-datastore/compare/v2.1.0...v2.1.1) (2021-04-20) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 7945f6f4f462..b6c2aa1a1627 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.1.1" +__version__ = "2.1.2" From 6ac332b1072e3b959ee51f0b68a608fcd61a6af8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 7 May 2021 13:18:26 -0400 Subject: [PATCH 326/611] chore: fix synth replacment for index creation (#166) Upstream changes broke the previous anchor for the fragment. Closes #160. --- packages/google-cloud-datastore/synth.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/synth.py index 818a9e80b1c8..6a59fc5bafa3 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/synth.py @@ -142,9 +142,12 @@ def system(session, disable_grpc): # tests. 
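    # NOTE: upstream edits to CONTRIBUTING.rst removed the old
    # 'app_credentials.json' anchor text, so the replacement below
    # re-anchors the composite-index instructions on the
    # "Test Coverage" heading instead, re-emitting that heading after
    # the inserted block.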
num = s.replace( "CONTRIBUTING.rst", - 'app_credentials.json"', - """app_credentials.json" - + """\ +\*\*\*\*\*\*\*\*\*\*\*\*\* +Test Coverage +\*\*\*\*\*\*\*\*\*\*\*\*\* +""", + """\ - You'll need to create composite `indexes `__ with the ``gcloud`` command line @@ -169,7 +172,12 @@ def system(session, disable_grpc): prevents clean-up) you can clear all system test data from your datastore instance via:: - $ python tests/system/utils/clear_datastore.py""") + $ python tests/system/utils/clear_datastore.py + +************* +Test Coverage +************* +""") if num != 1: raise Exception("Required replacement not made.") From 351e01ebbe7a4fdd228db8de8a526bb4cc77ea30 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 May 2021 16:18:02 -0400 Subject: [PATCH 327/611] chore: add library type to .repo-metadata.json (#169) --- packages/google-cloud-datastore/.repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index 10661c59824f..5f4bae89f2a8 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", "release_level": "ga", "language": "python", + "library_type": "GAPIC_COMBO", "repo": "googleapis/python-datastore", "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com" From f355743d11876956089b03262d715695ccca03de Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 18 May 2021 15:27:59 -0400 Subject: [PATCH 328/611] tests: add coverage for skip < offset case (#165) Closes #158 --- .../google/cloud/datastore/query.py | 5 ++ .../tests/unit/test_query.py | 60 ++++++++++++++++++- 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 2f455b6ffce2..b4b24ca7b4e8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -593,8 +593,12 @@ def _next_page(self): # skipped all of the results yet. Don't return any results. # Instead, rerun query, adjusting offsets. Datastore doesn't process # more than 1000 skipped results in a query. 
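+        # Copy the query proto before mutating it: the mock-based unit
+        # tests assert on the exact proto sent with each request, and an
+        # in-place edit would retroactively change the proto captured for
+        # the first `run_query` call.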
+ old_query_pb = query_pb + query_pb = query_pb2.Query() + query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability query_pb.start_cursor = response_pb.batch.skipped_cursor query_pb.offset -= response_pb.batch.skipped_results + response_pb = self.client._datastore_api.run_query( request={ "project_id": self._query.project, @@ -604,6 +608,7 @@ def _next_page(self): }, **kwargs, ) + entity_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, entity_pbs, self.item_to_value) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 97c1db88c7a6..89bc7e2c304c 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -609,6 +609,58 @@ def test__next_page_no_more(self): self.assertIsNone(page) ds_api.run_query.assert_not_called() + def test__next_page_w_skipped_lt_offset(self): + from google.api_core import page_iterator + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import Query + + project = "prujekt" + skipped_1 = 100 + skipped_cursor_1 = b"DEADBEEF" + skipped_2 = 50 + skipped_cursor_2 = b"FACEDACE" + + more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + + result_1 = _make_query_response([], b"", more_enum, skipped_1) + result_1.batch.skipped_cursor = skipped_cursor_1 + result_2 = _make_query_response([], b"", more_enum, skipped_2) + result_2.batch.skipped_cursor = skipped_cursor_2 + + ds_api = _make_datastore_api(result_1, result_2) + client = _Client(project, datastore_api=ds_api) + + query = Query(client) + offset = 150 + iterator = self._make_one(query, client, offset=offset) + + page = iterator._next_page() + + self.assertIsInstance(page, page_iterator.Page) + self.assertIs(page._parent, iterator) + + partition_id = entity_pb2.PartitionId(project_id=project) + read_options = datastore_pb2.ReadOptions() + + query_1 = query_pb2.Query(offset=offset) + query_2 = query_pb2.Query( + start_cursor=skipped_cursor_1, offset=(offset - skipped_1) + ) + expected_calls = [ + mock.call( + request={ + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "query": query, + } + ) + for query in [query_1, query_2] + ] + self.assertEqual(ds_api.run_query.call_args_list, expected_calls) + class Test__item_to_entity(unittest.TestCase): def _call_fut(self, iterator, entity_pb): @@ -789,6 +841,10 @@ def _make_query_response( ) -def _make_datastore_api(result=None): - run_query = mock.Mock(return_value=result, spec=[]) +def _make_datastore_api(*results): + if len(results) == 0: + run_query = mock.Mock(return_value=None, spec=[]) + else: + run_query = mock.Mock(side_effect=results, spec=[]) + return mock.Mock(run_query=run_query, spec=["run_query"]) From 3b0212131be8524f9c5008f95039144a5d30c842 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 25 May 2021 09:34:02 -0400 Subject: [PATCH 329/611] chore: delete unused protos (#174) --- .../proto/datastore_admin.proto | 425 ------------------ .../datastore_admin_v1/proto/index.proto | 115 ----- .../cloud/datastore_v1/proto/datastore.proto | 410 ----------------- .../cloud/datastore_v1/proto/entity.proto | 205 --------- .../cloud/datastore_v1/proto/query.proto | 313 ------------- 5 files changed, 1468 deletions(-) delete mode 
100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto deleted file mode 100644 index c0f470766511..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/datastore_admin.proto +++ /dev/null @@ -1,425 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.admin.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/datastore/admin/v1/index.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.Datastore.Admin.V1"; -option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin"; -option java_multiple_files = true; -option java_outer_classname = "DatastoreAdminProto"; -option java_package = "com.google.datastore.admin.v1"; -option ruby_package = "Google::Cloud::Datastore::Admin::V1"; - -// Google Cloud Datastore Admin API -// -// -// The Datastore Admin API provides several admin services for Cloud Datastore. -// -// ----------------------------------------------------------------------------- -// ## Concepts -// -// Project, namespace, kind, and entity as defined in the Google Cloud Datastore -// API. -// -// Operation: An Operation represents work being performed in the background. -// -// EntityFilter: Allows specifying a subset of entities in a project. This is -// specified as a combination of kinds and namespaces (either or both of which -// may be all). -// -// ----------------------------------------------------------------------------- -// ## Services -// -// # Export/Import -// -// The Export/Import service provides the ability to copy all or a subset of -// entities to/from Google Cloud Storage. -// -// Exported data may be imported into Cloud Datastore for any Google Cloud -// Platform project. It is not restricted to the export source project. It is -// possible to export from one project and then import into another. -// -// Exported data can also be loaded into Google BigQuery for analysis. -// -// Exports and imports are performed asynchronously. An Operation resource is -// created for each export/import. The state (including any errors encountered) -// of the export/import may be queried via the Operation resource. 
-// -// # Index -// -// The index service manages Cloud Datastore composite indexes. -// -// Index creation and deletion are performed asynchronously. -// An Operation resource is created for each such asynchronous operation. -// The state of the operation (including any errors encountered) -// may be queried via the Operation resource. -// -// # Operation -// -// The Operations collection provides a record of actions performed for the -// specified project (including any operations in progress). Operations are not -// created directly but through calls on other collections or resources. -// -// An operation that is not yet done may be cancelled. The request to cancel is -// asynchronous and the operation may continue to run for some time after the -// request to cancel is made. -// -// An operation that is done may be deleted so that it is no longer listed as -// part of the Operation collection. -// -// ListOperations returns all pending operations, but not completed operations. -// -// Operations are created by service DatastoreAdmin, -// but are accessed via service google.longrunning.Operations. -service DatastoreAdmin { - option (google.api.default_host) = "datastore.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/datastore"; - - // Exports a copy of all or a subset of entities from Google Cloud Datastore - // to another storage system, such as Google Cloud Storage. Recent updates to - // entities may not be reflected in the export. The export occurs in the - // background and its progress can be monitored and managed via the - // Operation resource that is created. The output of an export may only be - // used once the associated operation is done. If an export operation is - // cancelled before completion it may leave partial data behind in Google - // Cloud Storage. - rpc ExportEntities(ExportEntitiesRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:export" - body: "*" - }; - option (google.api.method_signature) = "project_id,labels,entity_filter,output_url_prefix"; - option (google.longrunning.operation_info) = { - response_type: "ExportEntitiesResponse" - metadata_type: "ExportEntitiesMetadata" - }; - } - - // Imports entities into Google Cloud Datastore. Existing entities with the - // same key are overwritten. The import occurs in the background and its - // progress can be monitored and managed via the Operation resource that is - // created. If an ImportEntities operation is cancelled, it is possible - // that a subset of the data has already been imported to Cloud Datastore. - rpc ImportEntities(ImportEntitiesRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:import" - body: "*" - }; - option (google.api.method_signature) = "project_id,labels,input_url,entity_filter"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "ImportEntitiesMetadata" - }; - } - - // Gets an index. - rpc GetIndex(GetIndexRequest) returns (Index) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/indexes/{index_id}" - }; - } - - // Lists the indexes that match the specified filters. Datastore uses an - // eventually consistent query to fetch the list of indexes and may - // occasionally return stale results. 
- rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/indexes" - }; - } -} - -// Metadata common to all Datastore Admin operations. -message CommonMetadata { - // The various possible states for an ongoing Operation. - enum State { - // Unspecified. - STATE_UNSPECIFIED = 0; - - // Request is being prepared for processing. - INITIALIZING = 1; - - // Request is actively being processed. - PROCESSING = 2; - - // Request is in the process of being cancelled after user called - // google.longrunning.Operations.CancelOperation on the operation. - CANCELLING = 3; - - // Request has been processed and is in its finalization stage. - FINALIZING = 4; - - // Request has completed successfully. - SUCCESSFUL = 5; - - // Request has finished being processed, but encountered an error. - FAILED = 6; - - // Request has finished being cancelled after user called - // google.longrunning.Operations.CancelOperation. - CANCELLED = 7; - } - - // The time that work began on the operation. - google.protobuf.Timestamp start_time = 1; - - // The time the operation ended, either successfully or otherwise. - google.protobuf.Timestamp end_time = 2; - - // The type of the operation. Can be used as a filter in - // ListOperationsRequest. - OperationType operation_type = 3; - - // The client-assigned labels which were provided when the operation was - // created. May also include additional labels. - map labels = 4; - - // The current state of the Operation. - State state = 5; -} - -// Operation types. -enum OperationType { - // Unspecified. - OPERATION_TYPE_UNSPECIFIED = 0; - - // ExportEntities. - EXPORT_ENTITIES = 1; - - // ImportEntities. - IMPORT_ENTITIES = 2; - - // CreateIndex. - CREATE_INDEX = 3; - - // DeleteIndex. - DELETE_INDEX = 4; -} - -// Measures the progress of a particular metric. -message Progress { - // The amount of work that has been completed. Note that this may be greater - // than work_estimated. - int64 work_completed = 1; - - // An estimate of how much work needs to be performed. May be zero if the - // work estimate is unavailable. - int64 work_estimated = 2; -} - -// The request for -// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. -message ExportEntitiesRequest { - // Required. Project ID against which to make the request. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Client-assigned labels. - map labels = 2; - - // Description of what data from the project is included in the export. - EntityFilter entity_filter = 3; - - // Required. Location for the export metadata and data files. - // - // The full resource URL of the external storage location. Currently, only - // Google Cloud Storage is supported. So output_url_prefix should be of the - // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the - // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud - // Storage namespace path (this is not a Cloud Datastore namespace). For more - // information about Cloud Storage namespace paths, see - // [Object name - // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). - // - // The resulting files will be nested deeper than the specified URL prefix. - // The final output URL will be provided in the - // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] field. 
That - // value should be used for subsequent ImportEntities operations. - // - // By nesting the data files deeper, the same Cloud Storage bucket can be used - // in multiple ExportEntities operations without conflict. - string output_url_prefix = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// The request for -// [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. -message ImportEntitiesRequest { - // Required. Project ID against which to make the request. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Client-assigned labels. - map labels = 2; - - // Required. The full resource URL of the external storage location. Currently, only - // Google Cloud Storage is supported. So input_url should be of the form: - // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where - // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is - // an optional Cloud Storage namespace path (this is not a Cloud Datastore - // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written - // by the ExportEntities operation. For more information about Cloud Storage - // namespace paths, see - // [Object name - // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). - // - // For more information, see - // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - string input_url = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optionally specify which kinds/namespaces are to be imported. If provided, - // the list must be a subset of the EntityFilter used in creating the export, - // otherwise a FAILED_PRECONDITION error will be returned. If no filter is - // specified then all entities from the export are imported. - EntityFilter entity_filter = 4; -} - -// The response for -// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. -message ExportEntitiesResponse { - // Location of the output metadata file. This can be used to begin an import - // into Cloud Datastore (this project or another project). See - // [google.datastore.admin.v1.ImportEntitiesRequest.input_url][google.datastore.admin.v1.ImportEntitiesRequest.input_url]. - // Only present if the operation completed successfully. - string output_url = 1; -} - -// Metadata for ExportEntities operations. -message ExportEntitiesMetadata { - // Metadata common to all Datastore Admin operations. - CommonMetadata common = 1; - - // An estimate of the number of entities processed. - Progress progress_entities = 2; - - // An estimate of the number of bytes processed. - Progress progress_bytes = 3; - - // Description of which entities are being exported. - EntityFilter entity_filter = 4; - - // Location for the export metadata and data files. This will be the same - // value as the - // [google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix] - // field. The final output location is provided in - // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - string output_url_prefix = 5; -} - -// Metadata for ImportEntities operations. -message ImportEntitiesMetadata { - // Metadata common to all Datastore Admin operations. - CommonMetadata common = 1; - - // An estimate of the number of entities processed. 
- Progress progress_entities = 2; - - // An estimate of the number of bytes processed. - Progress progress_bytes = 3; - - // Description of which entities are being imported. - EntityFilter entity_filter = 4; - - // The location of the import metadata file. This will be the same value as - // the [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] field. - string input_url = 5; -} - -// Identifies a subset of entities in a project. This is specified as -// combinations of kinds and namespaces (either or both of which may be all, as -// described in the following examples). -// Example usage: -// -// Entire project: -// kinds=[], namespace_ids=[] -// -// Kinds Foo and Bar in all namespaces: -// kinds=['Foo', 'Bar'], namespace_ids=[] -// -// Kinds Foo and Bar only in the default namespace: -// kinds=['Foo', 'Bar'], namespace_ids=[''] -// -// Kinds Foo and Bar in both the default and Baz namespaces: -// kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] -// -// The entire Baz namespace: -// kinds=[], namespace_ids=['Baz'] -message EntityFilter { - // If empty, then this represents all kinds. - repeated string kinds = 1; - - // An empty list represents all namespaces. This is the preferred - // usage for projects that don't use namespaces. - // - // An empty string element represents the default namespace. This should be - // used if the project has data in non-default namespaces, but doesn't want to - // include them. - // Each namespace in this list must be unique. - repeated string namespace_ids = 2; -} - -// The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. -message GetIndexRequest { - // Project ID against which to make the request. - string project_id = 1; - - // The resource ID of the index to get. - string index_id = 3; -} - -// The request for -// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. -message ListIndexesRequest { - // Project ID against which to make the request. - string project_id = 1; - - string filter = 3; - - // The maximum number of items to return. If zero, then all results will be - // returned. - int32 page_size = 4; - - // The next_page_token value returned from a previous List request, if any. - string page_token = 5; -} - -// The response for -// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. -message ListIndexesResponse { - // The indexes. - repeated Index indexes = 1; - - // The standard List next-page token. - string next_page_token = 2; -} - -// Metadata for Index operations. -message IndexOperationMetadata { - // Metadata common to all Datastore Admin operations. - CommonMetadata common = 1; - - // An estimate of the number of entities processed. - Progress progress_entities = 2; - - // The index resource ID that this operation is acting on. - string index_id = 3; -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto deleted file mode 100644 index 96c2278b3b63..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/proto/index.proto +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.admin.v1; - -import "google/api/field_behavior.proto"; -import "google/api/annotations.proto"; - -option csharp_namespace = "Google.Cloud.Datastore.Admin.V1"; -option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin"; -option java_multiple_files = true; -option java_outer_classname = "IndexProto"; -option java_package = "com.google.datastore.admin.v1"; -option ruby_package = "Google::Cloud::Datastore::Admin::V1"; - -// A minimal index definition. -message Index { - // A property of an index. - message IndexedProperty { - // Required. The property name to index. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The indexed property's direction. Must not be DIRECTION_UNSPECIFIED. - Direction direction = 2 [(google.api.field_behavior) = REQUIRED]; - } - - // For an ordered index, specifies whether each of the entity's ancestors - // will be included. - enum AncestorMode { - // The ancestor mode is unspecified. - ANCESTOR_MODE_UNSPECIFIED = 0; - - // Do not include the entity's ancestors in the index. - NONE = 1; - - // Include all the entity's ancestors in the index. - ALL_ANCESTORS = 2; - } - - // The direction determines how a property is indexed. - enum Direction { - // The direction is unspecified. - DIRECTION_UNSPECIFIED = 0; - - // The property's values are indexed so as to support sequencing in - // ascending order and also query by <, >, <=, >=, and =. - ASCENDING = 1; - - // The property's values are indexed so as to support sequencing in - // descending order and also query by <, >, <=, >=, and =. - DESCENDING = 2; - } - - // The possible set of states of an index. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The index is being created, and cannot be used by queries. - // There is an active long-running operation for the index. - // The index is updated when writing an entity. - // Some index data may exist. - CREATING = 1; - - // The index is ready to be used. - // The index is updated when writing an entity. - // The index is fully populated from all stored entities it applies to. - READY = 2; - - // The index is being deleted, and cannot be used by queries. - // There is an active long-running operation for the index. - // The index is not updated when writing an entity. - // Some index data may exist. - DELETING = 3; - - // The index was being created or deleted, but something went wrong. - // The index cannot by used by queries. - // There is no active long-running operation for the index, - // and the most recently finished long-running operation failed. - // The index is not updated when writing an entity. - // Some index data may exist. - ERROR = 4; - } - - // Output only. Project ID. - string project_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The resource ID of the index. - string index_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Required. The entity kind to which this index applies. - string kind = 4 [(google.api.field_behavior) = REQUIRED]; - - // Required. 
The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. - AncestorMode ancestor = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. An ordered sequence of property names and their index attributes. - repeated IndexedProperty properties = 6 [(google.api.field_behavior) = REQUIRED]; - - // Output only. The state of the index. - State state = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto deleted file mode 100644 index ad016194ab2d..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/datastore.proto +++ /dev/null @@ -1,410 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/datastore/v1/entity.proto"; -import "google/datastore/v1/query.proto"; - -option csharp_namespace = "Google.Cloud.Datastore.V1"; -option go_package = "google.golang.org/genproto/googleapis/datastore/v1;datastore"; -option java_multiple_files = true; -option java_outer_classname = "DatastoreProto"; -option java_package = "com.google.datastore.v1"; -option php_namespace = "Google\\Cloud\\Datastore\\V1"; -option ruby_package = "Google::Cloud::Datastore::V1"; - -// Each RPC normalizes the partition IDs of the keys in its input entities, -// and always returns entities with keys with normalized partition IDs. -// This applies to all keys and entities, including those in values, except keys -// with both an empty path and an empty or unset partition ID. Normalization of -// input keys sets the project ID (if not already set) to the project ID from -// the request. -// -service Datastore { - option (google.api.default_host) = "datastore.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/datastore"; - - // Looks up entities by key. - rpc Lookup(LookupRequest) returns (LookupResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:lookup" - body: "*" - }; - option (google.api.method_signature) = "project_id,read_options,keys"; - } - - // Queries for entities. - rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:runQuery" - body: "*" - }; - } - - // Begins a new transaction. - rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:beginTransaction" - body: "*" - }; - option (google.api.method_signature) = "project_id"; - } - - // Commits a transaction, optionally creating, deleting or modifying some - // entities. 
- rpc Commit(CommitRequest) returns (CommitResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:commit" - body: "*" - }; - option (google.api.method_signature) = "project_id,mode,transaction,mutations"; - option (google.api.method_signature) = "project_id,mode,mutations"; - } - - // Rolls back a transaction. - rpc Rollback(RollbackRequest) returns (RollbackResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:rollback" - body: "*" - }; - option (google.api.method_signature) = "project_id,transaction"; - } - - // Allocates IDs for the given keys, which is useful for referencing an entity - // before it is inserted. - rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:allocateIds" - body: "*" - }; - option (google.api.method_signature) = "project_id,keys"; - } - - // Prevents the supplied keys' IDs from being auto-allocated by Cloud - // Datastore. - rpc ReserveIds(ReserveIdsRequest) returns (ReserveIdsResponse) { - option (google.api.http) = { - post: "/v1/projects/{project_id}:reserveIds" - body: "*" - }; - option (google.api.method_signature) = "project_id,keys"; - } -} - -// The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. -message LookupRequest { - // Required. The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // The options for this lookup request. - ReadOptions read_options = 1; - - // Required. Keys of entities to look up. - repeated Key keys = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. -message LookupResponse { - // Entities found as `ResultType.FULL` entities. The order of results in this - // field is undefined and has no relation to the order of the keys in the - // input. - repeated EntityResult found = 1; - - // Entities not found as `ResultType.KEY_ONLY` entities. The order of results - // in this field is undefined and has no relation to the order of the keys - // in the input. - repeated EntityResult missing = 2; - - // A list of keys that were not looked up due to resource constraints. The - // order of results in this field is undefined and has no relation to the - // order of the keys in the input. - repeated Key deferred = 3; -} - -// The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. -message RunQueryRequest { - // Required. The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // Entities are partitioned into subsets, identified by a partition ID. - // Queries are scoped to a single partition. - // This partition ID is normalized with the standard default context - // partition ID. - PartitionId partition_id = 2; - - // The options for this query. - ReadOptions read_options = 1; - - // The type of query. - oneof query_type { - // The query to run. - Query query = 3; - - // The GQL query to run. - GqlQuery gql_query = 7; - } -} - -// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. -message RunQueryResponse { - // A batch of query results (always present). - QueryResultBatch batch = 1; - - // The parsed form of the `GqlQuery` from the request, if it was set. - Query query = 2; -} - -// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. -message BeginTransactionRequest { - // Required. 
The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // Options for a new transaction. - TransactionOptions transaction_options = 10; -} - -// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. -message BeginTransactionResponse { - // The transaction identifier (always present). - bytes transaction = 1; -} - -// The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -message RollbackRequest { - // Required. The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // Required. The transaction identifier, returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -// (an empty message). -message RollbackResponse { - -} - -// The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. -message CommitRequest { - // The modes available for commits. - enum Mode { - // Unspecified. This value must not be used. - MODE_UNSPECIFIED = 0; - - // Transactional: The mutations are either all applied, or none are applied. - // Learn about transactions - // [here](https://cloud.google.com/datastore/docs/concepts/transactions). - TRANSACTIONAL = 1; - - // Non-transactional: The mutations may not apply as all or none. - NON_TRANSACTIONAL = 2; - } - - // Required. The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // The type of commit to perform. Defaults to `TRANSACTIONAL`. - Mode mode = 5; - - // Must be set when mode is `TRANSACTIONAL`. - oneof transaction_selector { - // The identifier of the transaction associated with the commit. A - // transaction identifier is returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; - } - - // The mutations to perform. - // - // When mode is `TRANSACTIONAL`, mutations affecting a single entity are - // applied in order. The following sequences of mutations affecting a single - // entity are not permitted in a single `Commit` request: - // - // - `insert` followed by `insert` - // - `update` followed by `insert` - // - `upsert` followed by `insert` - // - `delete` followed by `update` - // - // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single - // entity. - repeated Mutation mutations = 6; -} - -// The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. -message CommitResponse { - // The result of performing the mutations. - // The i-th mutation result corresponds to the i-th mutation in the request. - repeated MutationResult mutation_results = 3; - - // The number of index entries updated during the commit, or zero if none were - // updated. - int32 index_updates = 4; -} - -// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. -message AllocateIdsRequest { - // Required. The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // Required. A list of keys with incomplete key paths for which to allocate IDs. - // No key may be reserved/read-only. 
- repeated Key keys = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. -message AllocateIdsResponse { - // The keys specified in the request (in the same order), each with - // its key path completed with a newly allocated ID. - repeated Key keys = 1; -} - -// The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. -message ReserveIdsRequest { - // Required. The ID of the project against which to make the request. - string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - - // If not empty, the ID of the database against which to make the request. - string database_id = 9; - - // Required. A list of keys with complete key paths whose numeric IDs should not be - // auto-allocated. - repeated Key keys = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. -message ReserveIdsResponse { - -} - -// A mutation to apply to an entity. -message Mutation { - // The mutation operation. - // - // For `insert`, `update`, and `upsert`: - // - The entity's key must not be reserved/read-only. - // - No property in the entity may have a reserved name, - // not even a property in an entity in a value. - // - No value in the entity may have meaning 18, - // not even a value in an entity in another value. - oneof operation { - // The entity to insert. The entity must not already exist. - // The entity key's final path element may be incomplete. - Entity insert = 4; - - // The entity to update. The entity must already exist. - // Must have a complete key path. - Entity update = 5; - - // The entity to upsert. The entity may or may not already exist. - // The entity key's final path element may be incomplete. - Entity upsert = 6; - - // The key of the entity to delete. The entity may or may not already exist. - // Must have a complete key path and must not be reserved/read-only. - Key delete = 7; - } - - // When set, the server will detect whether or not this mutation conflicts - // with the current version of the entity on the server. Conflicting mutations - // are not applied, and are marked as such in MutationResult. - oneof conflict_detection_strategy { - // The version of the entity that this mutation is being applied to. If this - // does not match the current version on the server, the mutation conflicts. - int64 base_version = 8; - } -} - -// The result of applying a mutation. -message MutationResult { - // The automatically allocated key. - // Set only when the mutation allocated a key. - Key key = 3; - - // The version of the entity on the server after processing the mutation. If - // the mutation doesn't change anything on the server, then the version will - // be the version of the current entity or, if no entity is present, a version - // that is strictly greater than the version of any previous entity and less - // than the version of any possible future entity. - int64 version = 4; - - // Whether a conflict was detected for this mutation. Always false when a - // conflict detection strategy field is not set in the mutation. - bool conflict_detected = 5; -} - -// The options shared by read requests. -message ReadOptions { - // The possible values for read consistencies. - enum ReadConsistency { - // Unspecified. This value must not be used. - READ_CONSISTENCY_UNSPECIFIED = 0; - - // Strong consistency. - STRONG = 1; - - // Eventual consistency. 
- EVENTUAL = 2; - } - - // If not specified, lookups and ancestor queries default to - // `read_consistency`=`STRONG`, global queries default to - // `read_consistency`=`EVENTUAL`. - oneof consistency_type { - // The non-transactional read consistency to use. - // Cannot be set to `STRONG` for global queries. - ReadConsistency read_consistency = 1; - - // The identifier of the transaction in which to read. A - // transaction identifier is returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 2; - } -} - -// Options for beginning a new transaction. -// -// Transactions can be created explicitly with calls to -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] or implicitly by setting -// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] in read requests. -message TransactionOptions { - // Options specific to read / write transactions. - message ReadWrite { - // The transaction identifier of the transaction being retried. - bytes previous_transaction = 1; - } - - // Options specific to read-only transactions. - message ReadOnly { - - } - - // The `mode` of the transaction, indicating whether write operations are - // supported. - oneof mode { - // The transaction should allow both reads and writes. - ReadWrite read_write = 1; - - // The transaction should only allow reads. - ReadOnly read_only = 2; - } -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto deleted file mode 100644 index 61286cd7a2c0..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/entity.proto +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/type/latlng.proto"; - -option csharp_namespace = "Google.Cloud.Datastore.V1"; -option go_package = "google.golang.org/genproto/googleapis/datastore/v1;datastore"; -option java_multiple_files = true; -option java_outer_classname = "EntityProto"; -option java_package = "com.google.datastore.v1"; -option php_namespace = "Google\\Cloud\\Datastore\\V1"; -option ruby_package = "Google::Cloud::Datastore::V1"; - -// A partition ID identifies a grouping of entities. The grouping is always -// by project and namespace, however the namespace ID may be empty. -// -// A partition ID contains several dimensions: -// project ID and namespace ID. -// -// Partition dimensions: -// -// - May be `""`. -// - Must be valid UTF-8 bytes. -// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` -// If the value of any dimension matches regex `__.*__`, the partition is -// reserved/read-only. 
-// A reserved/read-only partition ID is forbidden in certain documented -// contexts. -// -// Foreign partition IDs (in which the project ID does -// not match the context project ID ) are discouraged. -// Reads and writes of foreign partition IDs may fail if the project is not in -// an active state. -message PartitionId { - // The ID of the project to which the entities belong. - string project_id = 2; - - // If not empty, the ID of the namespace to which the entities belong. - string namespace_id = 4; -} - -// A unique identifier for an entity. -// If a key's partition ID or any of its path kinds or names are -// reserved/read-only, the key is reserved/read-only. -// A reserved/read-only key is forbidden in certain documented contexts. -message Key { - // A (kind, ID/name) pair used to construct a key path. - // - // If either name or ID is set, the element is complete. - // If neither is set, the element is incomplete. - message PathElement { - // The kind of the entity. - // A kind matching regex `__.*__` is reserved/read-only. - // A kind must not contain more than 1500 bytes when UTF-8 encoded. - // Cannot be `""`. - string kind = 1; - - // The type of ID. - oneof id_type { - // The auto-allocated ID of the entity. - // Never equal to zero. Values less than zero are discouraged and may not - // be supported in the future. - int64 id = 2; - - // The name of the entity. - // A name matching regex `__.*__` is reserved/read-only. - // A name must not be more than 1500 bytes when UTF-8 encoded. - // Cannot be `""`. - string name = 3; - } - } - - // Entities are partitioned into subsets, currently identified by a project - // ID and namespace ID. - // Queries are scoped to a single partition. - PartitionId partition_id = 1; - - // The entity path. - // An entity path consists of one or more elements composed of a kind and a - // string or numerical identifier, which identify entities. The first - // element identifies a _root entity_, the second element identifies - // a _child_ of the root entity, the third element identifies a child of the - // second entity, and so forth. The entities identified by all prefixes of - // the path are called the element's _ancestors_. - // - // An entity path is always fully complete: *all* of the entity's ancestors - // are required to be in the path along with the entity identifier itself. - // The only exception is that in some documented cases, the identifier in the - // last path element (for the entity) itself may be omitted. For example, - // the last path element of the key of `Mutation.insert` may have no - // identifier. - // - // A path can never be empty, and a path can have at most 100 elements. - repeated PathElement path = 2; -} - -// An array value. -message ArrayValue { - // Values in the array. - // The order of values in an array is preserved as long as all values have - // identical settings for 'exclude_from_indexes'. - repeated Value values = 1; -} - -// A message that can hold any of the supported value types and associated -// metadata. -message Value { - // Must have a value set. - oneof value_type { - // A null value. - google.protobuf.NullValue null_value = 11; - - // A boolean value. - bool boolean_value = 1; - - // An integer value. - int64 integer_value = 2; - - // A double value. - double double_value = 3; - - // A timestamp value. - // When stored in the Datastore, precise only to microseconds; - // any additional precision is rounded down. - google.protobuf.Timestamp timestamp_value = 10; - - // A key value. 
- Key key_value = 5; - - // A UTF-8 encoded string value. - // When `exclude_from_indexes` is false (it is indexed), may have at most - // 1500 bytes. Otherwise, may be set to at most 1,000,000 bytes. - string string_value = 17; - - // A blob value. - // May have at most 1,000,000 bytes. - // When `exclude_from_indexes` is false, may have at most 1500 bytes. - // In JSON requests, must be base64-encoded. - bytes blob_value = 18; - - // A geo point value representing a point on the surface of Earth. - google.type.LatLng geo_point_value = 8; - - // An entity value. - // - // - May have no key. - // - May have a key with an incomplete key path. - // - May have a reserved/read-only key. - Entity entity_value = 6; - - // An array value. - // Cannot contain another array value. - // A `Value` instance that sets field `array_value` must not set fields - // `meaning` or `exclude_from_indexes`. - ArrayValue array_value = 9; - } - - // The `meaning` field should only be populated for backwards compatibility. - int32 meaning = 14; - - // If the value should be excluded from all indexes including those defined - // explicitly. - bool exclude_from_indexes = 19; -} - -// A Datastore data object. -// -// An entity is limited to 1 megabyte when stored. That _roughly_ -// corresponds to a limit of 1 megabyte for the serialized form of this -// message. -message Entity { - // The entity's key. - // - // An entity must have a key, unless otherwise documented (for example, - // an entity in `Value.entity_value` may have no key). - // An entity's kind is its key path's last element's kind, - // or null if it has no key. - Key key = 1; - - // The entity's properties. - // The map's keys are property names. - // A property name matching regex `__.*__` is reserved. - // A reserved property name is forbidden in certain documented contexts. - // The name must not contain more than 500 characters. - // The name cannot be `""`. - map properties = 3; -} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto b/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto deleted file mode 100644 index 4cb3ef99b839..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/proto/query.proto +++ /dev/null @@ -1,313 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/datastore/v1/entity.proto"; -import "google/protobuf/wrappers.proto"; -import "google/type/latlng.proto"; - -option csharp_namespace = "Google.Cloud.Datastore.V1"; -option go_package = "google.golang.org/genproto/googleapis/datastore/v1;datastore"; -option java_multiple_files = true; -option java_outer_classname = "QueryProto"; -option java_package = "com.google.datastore.v1"; -option php_namespace = "Google\\Cloud\\Datastore\\V1"; -option ruby_package = "Google::Cloud::Datastore::V1"; - -// The result of fetching an entity from Datastore. 
-message EntityResult { - // Specifies what data the 'entity' field contains. - // A `ResultType` is either implied (for example, in `LookupResponse.missing` - // from `datastore.proto`, it is always `KEY_ONLY`) or specified by context - // (for example, in message `QueryResultBatch`, field `entity_result_type` - // specifies a `ResultType` for all the values in field `entity_results`). - enum ResultType { - // Unspecified. This value is never used. - RESULT_TYPE_UNSPECIFIED = 0; - - // The key and properties. - FULL = 1; - - // A projected subset of properties. The entity may have no key. - PROJECTION = 2; - - // Only the key. - KEY_ONLY = 3; - } - - // The resulting entity. - Entity entity = 1; - - // The version of the entity, a strictly positive number that monotonically - // increases with changes to the entity. - // - // This field is set for [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity - // results. - // - // For [missing][google.datastore.v1.LookupResponse.missing] entities in `LookupResponse`, this - // is the version of the snapshot that was used to look up the entity, and it - // is always set except for eventually consistent reads. - int64 version = 4; - - // A cursor that points to the position after the result entity. - // Set only when the `EntityResult` is part of a `QueryResultBatch` message. - bytes cursor = 3; -} - -// A query for entities. -message Query { - // The projection to return. Defaults to returning all properties. - repeated Projection projection = 2; - - // The kinds to query (if empty, returns entities of all kinds). - // Currently at most 1 kind may be specified. - repeated KindExpression kind = 3; - - // The filter to apply. - Filter filter = 4; - - // The order to apply to the query results (if empty, order is unspecified). - repeated PropertyOrder order = 5; - - // The properties to make distinct. The query results will contain the first - // result for each distinct combination of values for the given properties - // (if empty, all results are returned). - repeated PropertyReference distinct_on = 6; - - // A starting point for the query results. Query cursors are - // returned in query result batches and - // [can only be used to continue the same - // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). - bytes start_cursor = 7; - - // An ending point for the query results. Query cursors are - // returned in query result batches and - // [can only be used to limit the same - // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). - bytes end_cursor = 8; - - // The number of results to skip. Applies before limit, but after all other - // constraints. Optional. Must be >= 0 if specified. - int32 offset = 10; - - // The maximum number of results to return. Applies after all other - // constraints. Optional. - // Unspecified is interpreted as no limit. - // Must be >= 0 if specified. - google.protobuf.Int32Value limit = 12; -} - -// A representation of a kind. -message KindExpression { - // The name of the kind. - string name = 1; -} - -// A reference to a property relative to the kind expressions. -message PropertyReference { - // The name of the property. - // If name includes "."s, it may be interpreted as a property name path. - string name = 2; -} - -// A representation of a property in a projection. -message Projection { - // The property to project. - PropertyReference property = 1; -} - -// The desired order for a specific property. 
-message PropertyOrder { - // The sort direction. - enum Direction { - // Unspecified. This value must not be used. - DIRECTION_UNSPECIFIED = 0; - - // Ascending. - ASCENDING = 1; - - // Descending. - DESCENDING = 2; - } - - // The property to order by. - PropertyReference property = 1; - - // The direction to order by. Defaults to `ASCENDING`. - Direction direction = 2; -} - -// A holder for any type of filter. -message Filter { - // The type of filter. - oneof filter_type { - // A composite filter. - CompositeFilter composite_filter = 1; - - // A filter on a property. - PropertyFilter property_filter = 2; - } -} - -// A filter that merges multiple other filters using the given operator. -message CompositeFilter { - // A composite filter operator. - enum Operator { - // Unspecified. This value must not be used. - OPERATOR_UNSPECIFIED = 0; - - // The results are required to satisfy each of the combined filters. - AND = 1; - } - - // The operator for combining multiple filters. - Operator op = 1; - - // The list of filters to combine. - // Must contain at least one filter. - repeated Filter filters = 2; -} - -// A filter on a specific property. -message PropertyFilter { - // A property filter operator. - enum Operator { - // Unspecified. This value must not be used. - OPERATOR_UNSPECIFIED = 0; - - // Less than. - LESS_THAN = 1; - - // Less than or equal. - LESS_THAN_OR_EQUAL = 2; - - // Greater than. - GREATER_THAN = 3; - - // Greater than or equal. - GREATER_THAN_OR_EQUAL = 4; - - // Equal. - EQUAL = 5; - - // Has ancestor. - HAS_ANCESTOR = 11; - } - - // The property to filter by. - PropertyReference property = 1; - - // The operator to filter by. - Operator op = 2; - - // The value to compare the property to. - Value value = 3; -} - -// A [GQL -// query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). -message GqlQuery { - // A string of the format described - // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). - string query_string = 1; - - // When false, the query string must not contain any literals and instead must - // bind all values. For example, - // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while - // `SELECT * FROM Kind WHERE a = @value` is. - bool allow_literals = 2; - - // For each non-reserved named binding site in the query string, there must be - // a named parameter with that name, but not necessarily the inverse. - // - // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex - // `__.*__`, and must not be `""`. - map named_bindings = 5; - - // Numbered binding site @1 references the first numbered parameter, - // effectively using 1-based indexing, rather than the usual 0. - // - // For each binding site numbered i in `query_string`, there must be an i-th - // numbered parameter. The inverse must also be true. - repeated GqlQueryParameter positional_bindings = 4; -} - -// A binding parameter for a GQL query. -message GqlQueryParameter { - // The type of parameter. - oneof parameter_type { - // A value parameter. - Value value = 2; - - // A query cursor. Query cursors are returned in query - // result batches. - bytes cursor = 3; - } -} - -// A batch of results produced by a query. -message QueryResultBatch { - // The possible values for the `more_results` field. - enum MoreResultsType { - // Unspecified. This value is never used. - MORE_RESULTS_TYPE_UNSPECIFIED = 0; - - // There may be additional batches to fetch from this query. 
- NOT_FINISHED = 1; - - // The query is finished, but there may be more results after the limit. - MORE_RESULTS_AFTER_LIMIT = 2; - - // The query is finished, but there may be more results after the end - // cursor. - MORE_RESULTS_AFTER_CURSOR = 4; - - // The query is finished, and there are no more results. - NO_MORE_RESULTS = 3; - } - - // The number of results skipped, typically because of an offset. - int32 skipped_results = 6; - - // A cursor that points to the position after the last skipped result. - // Will be set when `skipped_results` != 0. - bytes skipped_cursor = 3; - - // The result type for every entity in `entity_results`. - EntityResult.ResultType entity_result_type = 1; - - // The results for this batch. - repeated EntityResult entity_results = 2; - - // A cursor that points to the position after the last result in the batch. - bytes end_cursor = 4; - - // The state of the query after the current batch. - MoreResultsType more_results = 5; - - // The version number of the snapshot this batch was returned from. - // This applies to the range of results from the query's `start_cursor` (or - // the beginning of the query if no cursor was given) to this batch's - // `end_cursor` (not the query's `end_cursor`). - // - // In a single transaction, subsequent query result batches for the same query - // can have a greater snapshot version number. Each batch's snapshot version - // is valid for all preceding batches. - // The value will be zero for eventually consistent queries. - int64 snapshot_version = 7; -} From 740ee5ba88cf3f2a6bb4bb83a0b43f191df871f9 Mon Sep 17 00:00:00 2001 From: Craig Labenz Date: Tue, 25 May 2021 09:58:10 -0700 Subject: [PATCH 330/611] fix(perf): improve performance unmarshalling entities from protobuf2 (#175) Uses direct access of pb2 object when iterating over Entity.properties. 
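As a rough sketch of the idea (illustrative names only, not the library's actual
helpers; proto-plus wrappers expose the underlying pb2 message through their
`._pb` attribute):

    # Hypothetical sketch: reading many properties through the proto-plus
    # wrapper pays a marshalling cost on every item access ...
    def property_tuples_wrapped(entity_pb):
        return list(entity_pb.properties.items())

    # ... while iterating the raw pb2 message skips the per-item wrapping.
    def property_tuples_raw(entity_pb):
        return list(entity_pb._pb.properties.items())
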
This reduces the time required to hydrate query results by 50% --- .../google-cloud-datastore/google/cloud/datastore/helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index c1d022e32c6d..5627d8a332a8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -138,7 +138,7 @@ def entity_from_protobuf(pb): entity_meanings = {} exclude_from_indexes = [] - for prop_name, value_pb in _property_tuples(proto_pb): + for prop_name, value_pb in _property_tuples(proto_pb._pb): value = _get_value_from_value_pb(value_pb) entity_props[prop_name] = value @@ -154,7 +154,7 @@ def entity_from_protobuf(pb): if is_list and len(value) > 0: exclude_values = set( value_pb.exclude_from_indexes - for value_pb in value_pb._pb.array_value.values + for value_pb in value_pb.array_value.values ) if len(exclude_values) != 1: raise ValueError( From cd359b454e30b90bb05ea542b755071e34e5a9c7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 25 May 2021 17:08:10 +0000 Subject: [PATCH 331/611] chore: release 2.1.3 (#176) :robot: I have created a release \*beep\* \*boop\* --- ### [2.1.3](https://www.github.com/googleapis/python-datastore/compare/v2.1.2...v2.1.3) (2021-05-25) ### Bug Fixes * **perf:** improve performance unmarshalling entities from protobuf2 ([#175](https://www.github.com/googleapis/python-datastore/issues/175)) ([0e5b718](https://www.github.com/googleapis/python-datastore/commit/0e5b718a70368f656ede3a27174ef74ca324ab65)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index df56b8e534b2..880add6efd35 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.1.3](https://www.github.com/googleapis/python-datastore/compare/v2.1.2...v2.1.3) (2021-05-25) + + +### Bug Fixes + +* **perf:** improve performance unmarshalling entities from protobuf2 ([#175](https://www.github.com/googleapis/python-datastore/issues/175)) ([0e5b718](https://www.github.com/googleapis/python-datastore/commit/0e5b718a70368f656ede3a27174ef74ca324ab65)) + ### [2.1.2](https://www.github.com/googleapis/python-datastore/compare/v2.1.1...v2.1.2) (2021-05-03) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index b6c2aa1a1627..28ad2844bab7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.1.2" +__version__ = "2.1.3" From fa564a92141ca8cc068e3310852ad64c2d58f637 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 7 Jun 2021 15:56:57 -0400 Subject: [PATCH 332/611] chore: migrate to owl bot (#170) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate to owl bot * chore: use the latest post processor image * run the post processor * remove synth.py and synth.metadata * attempt to fix coverage failure * chore: raise coverage requirements to 100 * 🦉 Updates from OwlBot Co-authored-by: Bu Sun Kim Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.flake8 | 1 + .../.github/.OwlBot.lock.yaml | 4 + .../.github/.OwlBot.yaml | 28 ++++ .../.github/header-checker-lint.yml | 15 ++ packages/google-cloud-datastore/.gitignore | 4 +- .../google-cloud-datastore/.kokoro/build.sh | 26 ++- .../.kokoro/docs/docs-presubmit.cfg | 11 ++ .../google-cloud-datastore/.kokoro/release.sh | 4 +- .../.kokoro/release/common.cfg | 14 +- .../samples/python3.6/periodic-head.cfg | 11 ++ .../samples/python3.7/periodic-head.cfg | 11 ++ .../samples/python3.8/periodic-head.cfg | 11 ++ .../.kokoro/test-samples-against-head.sh | 28 ++++ .../.kokoro/test-samples-impl.sh | 102 ++++++++++++ .../.kokoro/test-samples.sh | 96 ++--------- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 31 ++++ packages/google-cloud-datastore/.trampolinerc | 1 + .../google-cloud-datastore/CONTRIBUTING.rst | 59 ++++--- packages/google-cloud-datastore/LICENSE | 7 +- packages/google-cloud-datastore/MANIFEST.in | 4 +- .../docs/_static/custom.css | 18 ++- packages/google-cloud-datastore/docs/conf.py | 13 ++ packages/google-cloud-datastore/noxfile.py | 85 ++++++---- .../{synth.py => owlbot.py} | 125 +++++++-------- packages/google-cloud-datastore/renovate.json | 6 +- .../google-cloud-datastore/synth.metadata | 149 ------------------ .../testing/constraints-3.6.txt | 3 +- 28 files changed, 491 insertions(+), 378 deletions(-) create mode 100644 packages/google-cloud-datastore/.github/.OwlBot.lock.yaml create mode 100644 packages/google-cloud-datastore/.github/.OwlBot.yaml create mode 100644 packages/google-cloud-datastore/.github/header-checker-lint.yml create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg create mode 100755 packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh create mode 100755 packages/google-cloud-datastore/.kokoro/test-samples-impl.sh create mode 100644 packages/google-cloud-datastore/.pre-commit-config.yaml rename packages/google-cloud-datastore/{synth.py => owlbot.py} (60%) delete mode 100644 packages/google-cloud-datastore/synth.metadata diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index ed9316381c9c..29227d4cf419 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000000..8051d189541c --- /dev/null +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/packages/google-cloud-datastore/.github/.OwlBot.yaml b/packages/google-cloud-datastore/.github/.OwlBot.yaml new file mode 100644 index 000000000000..83c15486953f --- /dev/null +++ b/packages/google-cloud-datastore/.github/.OwlBot.yaml @@ -0,0 +1,28 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/datastore/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/datastore/$1/$2 + - source: /google/datastore/admin/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/datastore_admin/$1/$2 + +begin-after-commit-hash: de97bb0aeade880aba2cd71a55c06dbc4cd2b337 + diff --git a/packages/google-cloud-datastore/.github/header-checker-lint.yml b/packages/google-cloud-datastore/.github/header-checker-lint.yml new file mode 100644 index 000000000000..6fe78aa7987a --- /dev/null +++ b/packages/google-cloud-datastore/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.gitignore b/packages/google-cloud-datastore/.gitignore index b9daa52f118d..b4243ced74e4 100644 --- a/packages/google-cloud-datastore/.gitignore +++ b/packages/google-cloud-datastore/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index df77f9793578..59bd450ec1d8 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-datastore +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-datastore" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg index 1118107829b7..049a9863541a 100644 --- a/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg +++ b/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index 95282f08e11f..c593dd9dabfd 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
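# The hunk below swaps the keystore-held PyPI password for a token pulled
# from Secret Manager; `__token__` is the username PyPI reserves for
# API-token uploads.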
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-datastore python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg index b4f0c6ad18f6..8571f251b63b 100644 --- a/packages/google-cloud-datastore/.kokoro/release/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-datastore/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh new file mode 100755 index 000000000000..7503e7624993 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-datastore
+
+exec .kokoro/test-samples-impl.sh
diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh
new file mode 100755
index 000000000000..cf5de74c17a5
--- /dev/null
+++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+  echo "No tests run. './samples' not found"
+  exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+    --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+    --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
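+# Each directory holding a requirements.txt is treated as one sample project:
+# the loop below enters it, runs the configured nox session, and, on periodic
+# builds, forwards the test log to the FlakyBot.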
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the FlakyBot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+      chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+      $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+      RTN=1
+      echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+      echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh
index c4163d795804..b3014bb9cdf4 100755
--- a/packages/google-cloud-datastore/.kokoro/test-samples.sh
+++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh.
 
 # `-e` enables the script to automatically fail when a command fails
 # `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
@@ -24,87 +28,19 @@ cd github/python-datastore
 
 # Run periodic samples tests at latest release
 if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # preserving the test runner implementation.
+    cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    echo "Now we rewind the repo back to the latest release..."
     LATEST_RELEASE=$(git describe --abbrev=0 --tags)
     git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
-  echo "No tests run. `./samples` not found"
-  exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
-    gcloud auth activate-service-account \
-    --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
-    --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
-    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh index 719bcd5ba84d..4af6cdc26dbc 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml new file mode 100644 index 000000000000..4f00c7cffcfd --- /dev/null +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 diff --git a/packages/google-cloud-datastore/.trampolinerc b/packages/google-cloud-datastore/.trampolinerc index 995ee29111e1..383b6ec89fbc 100644 --- a/packages/google-cloud-datastore/.trampolinerc +++ b/packages/google-cloud-datastore/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 2002603b3c7d..0f183f3c77a0 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -111,6 +120,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for @@ -123,33 +142,25 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. - You'll need to create composite `indexes `__ @@ -176,7 +187,6 @@ Running System Tests $ python tests/system/utils/clear_datastore.py - ************* Test Coverage ************* @@ -217,25 +227,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-datastore/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/packages/google-cloud-datastore/LICENSE b/packages/google-cloud-datastore/LICENSE index a8ee855de2aa..d64569567334 100644 --- a/packages/google-cloud-datastore/LICENSE +++ b/packages/google-cloud-datastore/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index e9e29d12033d..e783f4c6209b 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! 
include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/packages/google-cloud-datastore/docs/_static/custom.css b/packages/google-cloud-datastore/docs/_static/custom.css index 0abaf229fce3..b0a295464b23 100644 --- a/packages/google-cloud-datastore/docs/_static/custom.css +++ b/packages/google-cloud-datastore/docs/_static/custom.css @@ -1,4 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 46cba6cafd16..ee4a5a8f9144 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-datastore documentation build configuration file # diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index a6c9bf64193f..4dd9e06c67a0 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,22 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -45,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -70,18 +80,23 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. 
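# In the hunk below, test dependencies start being installed with pip's `-c`
# constraints flag against per-interpreter files such as
# testing/constraints-3.6.txt, so the oldest supported Python session
# exercises the minimum dependency versions the library declares.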
- session.install( - "mock", "pytest", "pytest-cov", + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-e", ".") + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google.cloud.datastore", - "--cov=google.cloud", - "--cov=tests.unit", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", @@ -101,15 +116,18 @@ def unit(session): @nox.parametrize("disable_grpc", [False, True]) def system(session, disable_grpc): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -122,10 +140,8 @@ def system(session, disable_grpc): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) env = {} if disable_grpc: @@ -133,10 +149,23 @@ def system(session, disable_grpc): # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, env=env, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + env=env, + *session.posargs, + ) + if system_test_folder_exists: session.run( - "py.test", "--quiet", system_test_folder_path, env=env, *session.posargs + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + env=env, + *session.posargs, ) @@ -148,7 +177,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=97") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -158,7 +187,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -180,7 +209,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-datastore/synth.py b/packages/google-cloud-datastore/owlbot.py similarity index 60% rename from packages/google-cloud-datastore/synth.py rename to packages/google-cloud-datastore/owlbot.py index 6a59fc5bafa3..8017fb4a6390 100644 --- a/packages/google-cloud-datastore/synth.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -16,75 +16,48 @@ import synthtool as s from synthtool import gcp -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -# ---------------------------------------------------------------------------- -# Generate datastore GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="datastore", - version="v1", - bazel_target="//google/datastore/v1:datastore-v1-py", - include_protos=True, -) - -s.move(library / "google/cloud/datastore_v1") - -s.move( - library / f"tests/", - f"tests", -) -s.move(library / "scripts") - -# ---------------------------------------------------------------------------- -# Generate datastore admin GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="datastore_admin", - version="v1", - bazel_target="//google/datastore/admin/v1:datastore-admin-v1-py", - include_protos=True, -) - -s.move( - library / "google/cloud/datastore_admin_v1", - "google/cloud/datastore_admin_v1" -) - +# This library ships clients for two different APIs, +# Datastore and Datastore Admin +datastore_default_version = "v1" +datastore_admin_default_version = "v1" + +for library in s.get_staging_dirs(datastore_default_version): + if library.parent.absolute() == 'datastore': + s.move(library / f"google/cloud/datastore_{library.name}") + s.move(library / f"tests/") + s.move(library / "scripts") + +for library in s.get_staging_dirs(datastore_admin_default_version): + if library.parent.absolute() == 'datastore_admin': + s.replace( + library / "google/**/datastore_admin_client.py", + "google-cloud-datastore-admin", + "google-cloud-datstore" + ) -s.move( - library / f"tests/", - f"tests", -) + # Remove spurious markup + s.replace( + "google/**/datastore_admin/client.py", + "\s+---------------------------------(-)+", + "" + ) -s.move(library / "scripts") -s.replace( - "google/**/datastore_admin_client.py", - "google-cloud-datastore-admin", - "google-cloud-datstore" -) + s.move(library / f"google/cloud/datastore_admin_{library.name}") + s.move(library / f"tests") + s.move(library / "scripts") -# Remove spurious markup -s.replace( - "google/**/datastore_admin/client.py", - 
"\s+---------------------------------(-)+", - "" -) +s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -# TODO: cov_level should be 99%, reduced due to regression in test coverage. templated_files = common.py_library( - unit_cov_level=97, - cov_level=97, - unit_test_python_versions=["3.6", "3.7", "3.8", "3.9"], - system_test_python_versions=["3.8"], + microgenerator=True, ) s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) -s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') # Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) s.replace( @@ -116,22 +89,40 @@ def system(session, disable_grpc): s.replace( "noxfile.py", - """\ - session.run\("py.test", "--quiet", system_test_path, \*session.posargs\) -""", - """\ - session.run("py.test", "--quiet", system_test_path, env=env, *session.posargs) + """session\.run\( + "py\.test", + "--quiet", + f"--junitxml=system_\{session\.python\}_sponge_log\.xml", + system_test_path, + \*session\.posargs + \)""", + """session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + env=env, + *session.posargs + ) """, ) s.replace( "noxfile.py", - """\ - session.run\("py.test", "--quiet", system_test_folder_path, \*session.posargs\) -""", - """\ - session.run( - "py.test", "--quiet", system_test_folder_path, env=env, *session.posargs + """session\.run\( + "py\.test", + "--quiet", + f"--junitxml=system_\{session\.python\}_sponge_log\.xml", + system_test_folder_path, + \*session\.posargs + \)""", + """session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + env=env, + *session.posargs ) """, ) diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json index 4fa949311b20..c04895563e69 100644 --- a/packages/google-cloud-datastore/renovate.json +++ b/packages/google-cloud-datastore/renovate.json @@ -1,5 +1,9 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/packages/google-cloud-datastore/synth.metadata b/packages/google-cloud-datastore/synth.metadata deleted file mode 100644 index c8b752aec41f..000000000000 --- a/packages/google-cloud-datastore/synth.metadata +++ /dev/null @@ -1,149 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-datastore.git", - "sha": "3c8da54de9a6837d6d96b6befd3b0ef084d8ff7f" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7f31f40209008ad24058579e7112e45fc9d5715e", - "internalRef": "339939234" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "datastore", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "datastore_admin", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".flake8", - 
".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "google/cloud/datastore_admin_v1/__init__.py", - "google/cloud/datastore_admin_v1/proto/datastore_admin.proto", - "google/cloud/datastore_admin_v1/proto/index.proto", - "google/cloud/datastore_admin_v1/py.typed", - "google/cloud/datastore_admin_v1/services/__init__.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/client.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py", - "google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py", - "google/cloud/datastore_admin_v1/types/__init__.py", - "google/cloud/datastore_admin_v1/types/datastore_admin.py", - "google/cloud/datastore_admin_v1/types/index.py", - "google/cloud/datastore_v1/__init__.py", - "google/cloud/datastore_v1/proto/datastore.proto", - "google/cloud/datastore_v1/proto/entity.proto", - "google/cloud/datastore_v1/proto/query.proto", - "google/cloud/datastore_v1/py.typed", - "google/cloud/datastore_v1/services/__init__.py", - "google/cloud/datastore_v1/services/datastore/__init__.py", - "google/cloud/datastore_v1/services/datastore/async_client.py", - "google/cloud/datastore_v1/services/datastore/client.py", - "google/cloud/datastore_v1/services/datastore/transports/__init__.py", - "google/cloud/datastore_v1/services/datastore/transports/base.py", - "google/cloud/datastore_v1/services/datastore/transports/grpc.py", - "google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py", 
- "google/cloud/datastore_v1/types/__init__.py", - "google/cloud/datastore_v1/types/datastore.py", - "google/cloud/datastore_v1/types/entity.py", - "google/cloud/datastore_v1/types/query.py", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/fixup_datastore_admin_v1_keywords.py", - "scripts/fixup_datastore_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/datastore_admin_v1/__init__.py", - "tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py", - "tests/unit/gapic/datastore_v1/__init__.py", - "tests/unit/gapic/datastore_v1/test_datastore.py" - ] -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/testing/constraints-3.6.txt b/packages/google-cloud-datastore/testing/constraints-3.6.txt index 20a821c2d75d..01fc45a4c0f7 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.6.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.6.txt @@ -8,4 +8,5 @@ google-api-core==1.22.2 google-cloud-core==1.4.0 proto-plus==1.4.0 -libcst==0.2.5 \ No newline at end of file +libcst==0.2.5 +google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is required through google-api-core \ No newline at end of file From 6ad3edd08c0802327614d9e73bc0151bcacefbb9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 14 Jun 2021 15:39:56 -0400 Subject: [PATCH 333/611] chore: new owl bot post processor docker image (#177) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:9d6a2d613e2c04c07ecdb6c287e3931890f6d30266ab5ee4ee412f748dc98341 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 5 ++--- packages/google-cloud-datastore/docs/conf.py | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 8051d189541c..c1ef6e601d58 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:9d6a2d613e2c04c07ecdb6c287e3931890f6d30266ab5ee4ee412f748dc98341 diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index ee4a5a8f9144..2c48bd4816a4 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -363,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } From 114054c28670fb435382a8e7de50a0fb2c31b97f Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Tue, 15 Jun 2021 14:38:55 -0700 Subject: [PATCH 334/611] chore: create flakybot.yaml (#178) --- packages/google-cloud-datastore/.github/flakybot.yaml | 1 + 1 file 
changed, 1 insertion(+) create mode 100644 packages/google-cloud-datastore/.github/flakybot.yaml diff --git a/packages/google-cloud-datastore/.github/flakybot.yaml b/packages/google-cloud-datastore/.github/flakybot.yaml new file mode 100644 index 000000000000..cb83375f9893 --- /dev/null +++ b/packages/google-cloud-datastore/.github/flakybot.yaml @@ -0,0 +1 @@ +issuePriority: p2 From 245be61e17854cb964f04d320b17c96c64902f0c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 17 Jun 2021 10:58:08 +0000 Subject: [PATCH 335/611] chore: new owl bot post processor docker image (#179) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce --- .../google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/docs/conf.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index c1ef6e601d58..ea06d395ea2b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:9d6a2d613e2c04c07ecdb6c287e3931890f6d30266ab5ee4ee412f748dc98341 + digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 2c48bd4816a4..f1b507871245 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -80,9 +80,9 @@ master_doc = "index" # General information about the project. 
-project = u"google-cloud-datastore" -copyright = u"2019, Google" -author = u"Google APIs" +project = "google-cloud-datastore" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -281,7 +281,7 @@ ( master_doc, "google-cloud-datastore.tex", - u"google-cloud-datastore Documentation", + "google-cloud-datastore Documentation", author, "manual", ) @@ -316,7 +316,7 @@ ( master_doc, "google-cloud-datastore", - u"google-cloud-datastore Documentation", + "google-cloud-datastore Documentation", [author], 1, ) @@ -335,7 +335,7 @@ ( master_doc, "google-cloud-datastore", - u"google-cloud-datastore Documentation", + "google-cloud-datastore Documentation", author, "google-cloud-datastore", "google-cloud-datastore Library", From 16904fd4cc265084ecacc4ee494f5aa3c2a2e991 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 19 Jun 2021 02:02:03 +0000 Subject: [PATCH 336/611] docs: omit mention of Python 2.7 in 'CONTRIBUTING.rst' (#1127) (#181) Source-Link: https://github.com/googleapis/synthtool/commit/b91f129527853d5b756146a0b5044481fb4e09a8 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/CONTRIBUTING.rst | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index ea06d395ea2b..cc49c6a3dfac 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce + digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 0f183f3c77a0..d70e876a1f45 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -69,7 +69,6 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: - $ nox -s unit-2.7 $ nox -s unit-3.8 $ ... @@ -144,7 +143,6 @@ Running System Tests # Run all system tests $ nox -s system-3.8 - $ nox -s system-2.7 # Run a single system test $ nox -s system-3.8 -- -k @@ -152,9 +150,8 @@ Running System Tests .. note:: - System tests are only configured to run under Python 2.7 and - Python 3.8. For expediency, we do not run them in older versions - of Python 3. + System tests are only configured to run under Python 3.8. + For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local auth settings and change some configuration in your project to From ccfcb16907dc184eecbe4b08ff0f2bcdbfba47ff Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 20 Jun 2021 01:34:03 +0000 Subject: [PATCH 337/611] chore: update precommit hook pre-commit/pre-commit-hooks to v4 (#1083) (#183) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pre-commit/pre-commit-hooks](https://togithub.com/pre-commit/pre-commit-hooks) | repository | major | `v3.4.0` -> `v4.0.1` | --- ### Release Notes
pre-commit/pre-commit-hooks ### [`v4.0.1`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.1) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v4.0.0...v4.0.1) ##### Fixes - `check-shebang-scripts-are-executable` fix entry point. - [#​602](https://togithub.com/pre-commit/pre-commit-hooks/issues/602) issue by [@​Person-93](https://togithub.com/Person-93). - [#​603](https://togithub.com/pre-commit/pre-commit-hooks/issues/603) PR by [@​scop](https://togithub.com/scop). ### [`v4.0.0`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.0) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v3.4.0...v4.0.0) ##### Features - `check-json`: report duplicate keys. - [#​558](https://togithub.com/pre-commit/pre-commit-hooks/issues/558) PR by [@​AdityaKhursale](https://togithub.com/AdityaKhursale). - [#​554](https://togithub.com/pre-commit/pre-commit-hooks/issues/554) issue by [@​adamchainz](https://togithub.com/adamchainz). - `no-commit-to-branch`: add `main` to default blocked branches. - [#​565](https://togithub.com/pre-commit/pre-commit-hooks/issues/565) PR by [@​ndevenish](https://togithub.com/ndevenish). - `check-case-conflict`: check conflicts in directory names as well. - [#​575](https://togithub.com/pre-commit/pre-commit-hooks/issues/575) PR by [@​slsyy](https://togithub.com/slsyy). - [#​70](https://togithub.com/pre-commit/pre-commit-hooks/issues/70) issue by [@​andyjack](https://togithub.com/andyjack). - `check-vcs-permalinks`: forbid other branch names. - [#​582](https://togithub.com/pre-commit/pre-commit-hooks/issues/582) PR by [@​jack1142](https://togithub.com/jack1142). - [#​581](https://togithub.com/pre-commit/pre-commit-hooks/issues/581) issue by [@​jack1142](https://togithub.com/jack1142). - `check-shebang-scripts-are-executable`: new hook which ensures shebang'd scripts are executable. - [#​545](https://togithub.com/pre-commit/pre-commit-hooks/issues/545) PR by [@​scop](https://togithub.com/scop). ##### Fixes - `check-executables-have-shebangs`: Short circuit shebang lookup on windows. - [#​544](https://togithub.com/pre-commit/pre-commit-hooks/issues/544) PR by [@​scop](https://togithub.com/scop). - `requirements-txt-fixer`: Fix comments which have indentation - [#​549](https://togithub.com/pre-commit/pre-commit-hooks/issues/549) PR by [@​greshilov](https://togithub.com/greshilov). - [#​548](https://togithub.com/pre-commit/pre-commit-hooks/issues/548) issue by [@​greshilov](https://togithub.com/greshilov). - `pretty-format-json`: write to stdout using UTF-8 encoding. - [#​571](https://togithub.com/pre-commit/pre-commit-hooks/issues/571) PR by [@​jack1142](https://togithub.com/jack1142). - [#​570](https://togithub.com/pre-commit/pre-commit-hooks/issues/570) issue by [@​jack1142](https://togithub.com/jack1142). - Use more inclusive language. - [#​599](https://togithub.com/pre-commit/pre-commit-hooks/issues/599) PR by [@​asottile](https://togithub.com/asottile). ##### Breaking changes - Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`. - [#​597](https://togithub.com/pre-commit/pre-commit-hooks/issues/597) PR by [@​asottile](https://togithub.com/asottile).
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Link: https://github.com/googleapis/synthtool/commit/333fd90856f1454380514bc59fc0936cdaf1c202 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index cc49c6a3dfac..9602d540595e 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd + digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml index 4f00c7cffcfd..62eb5a77d9a3 100644 --- a/packages/google-cloud-datastore/.pre-commit-config.yaml +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -16,7 +16,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer From 28f42c5d0ca903fd814ee1f7b7f4c89d9c1204e8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 22 Jun 2021 20:08:32 +0000 Subject: [PATCH 338/611] chore: add kokoro 3.9 config templates (#1128) (#185) Source-Link: https://github.com/googleapis/synthtool/commit/b0eb8a8b30b46a3c98d23c23107acb748c6601a1 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.9/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.9/continuous.cfg | 6 +++ .../samples/python3.9/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.9/periodic.cfg | 6 +++ .../.kokoro/samples/python3.9/presubmit.cfg | 6 +++ 6 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 
9602d540595e..0954585f2833 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 + digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg new file mode 100644 index 000000000000..6e6deb8407f0 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.9" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py39" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ 
b/packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file From ab7bc1ed9267305a450021e51dd4d98f48208a20 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 25 Jun 2021 17:15:01 -0400 Subject: [PATCH 339/611] chore(python): simplify nox steps in CONTRIBUTING.rst (#187) Source-Link: https://github.com/googleapis/synthtool/commit/26558bae8976a985d73c2d98c31d8612273f907d Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/CONTRIBUTING.rst | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 0954585f2833..e2b39f946040 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 + digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index d70e876a1f45..63d4d9de2c87 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -68,14 +68,12 @@ Using ``nox`` We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: + $ nox -s unit - $ nox -s unit-3.8 - $ ... +- To run a single unit test:: -- Args to pytest can be passed through the nox command separated by a `--`. For - example, to run a single test:: + $ nox -s unit-3.9 -- -k - $ nox -s unit-3.8 -- -k .. note:: @@ -142,7 +140,7 @@ Running System Tests - To run system tests, you can execute:: # Run all system tests - $ nox -s system-3.8 + $ nox -s system # Run a single system test $ nox -s system-3.8 -- -k @@ -240,8 +238,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-datastore/blob/master/noxfile.py -We also explicitly decided to support Python 3 beginning with version -3.6. Reasons for this include: +We also explicitly decided to support Python 3 beginning with version 3.6. +Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ From 9ec2e75a5dba0ffad6eb22802777750b80a955fd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 9 Jul 2021 14:36:04 -0400 Subject: [PATCH 340/611] perf: further avoid using proto-plus wrapper when unmarshalling entities (#190) Always unwrap to get the raw protobuf message, rather than the proto-plus wrapper. We are back to within a few percent of the older/faster version on my comparison test. Closes #150. 
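
As an illustration of the pattern this change standardizes on (passing helpers the raw protobuf message obtained from a proto-plus wrapper's ``_pb`` attribute), here is a minimal sketch. It is not part of the patch itself; it assumes google-cloud-datastore 2.x, where ``google.cloud.datastore_v1.types.entity.Value`` is a proto-plus wrapper around the generated protobuf message:

    from google.cloud.datastore_v1.types import entity as entity_pb2

    value = entity_pb2.Value()   # proto-plus wrapper
    value.string_value = "Foo"   # writes go through the wrapper

    raw = value._pb              # the underlying raw protobuf message
    # After this change, helpers dispatch on the raw message directly:
    assert raw.WhichOneof("value_type") == "string_value"
    assert raw.string_value == "Foo"

Dispatching on the raw message avoids re-wrapping each nested value in a proto-plus object, which is the per-access overhead reported in #150.
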
--- .../google/cloud/datastore/helpers.py | 93 ++++++---------- .../tests/unit/test_batch.py | 4 +- .../tests/unit/test_client.py | 7 +- .../tests/unit/test_helpers.py | 100 +++++++----------- .../tests/unit/test_query.py | 2 + 5 files changed, 73 insertions(+), 133 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 5627d8a332a8..7222fbdff73f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -22,6 +22,7 @@ from google.protobuf import struct_pb2 from google.type import latlng_pb2 +from proto.datetime_helpers import DatetimeWithNanoseconds from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -33,8 +34,8 @@ def _get_meaning(value_pb, is_list=False): """Get the meaning from a protobuf value. - :type value_pb: :class:`.entity_pb2.Value` - :param value_pb: The protobuf value to be checked for an + :type value_pb: :class:`.entity_pb2.Value._pb` + :param value_pb: The *raw* protobuf value to be checked for an associated meaning. :type is_list: bool @@ -47,14 +48,9 @@ def _get_meaning(value_pb, is_list=False): means it just returns a list of meanings. If all the list meanings agree, it just condenses them. """ - meaning = None if is_list: - values = ( - value_pb._pb.array_value.values - if hasattr(value_pb, "_pb") - else value_pb.array_value.values - ) + values = value_pb.array_value.values # An empty list will have no values, hence no shared meaning # set among them. @@ -65,16 +61,18 @@ def _get_meaning(value_pb, is_list=False): # the rest which may be enum/int values. all_meanings = [_get_meaning(sub_value_pb) for sub_value_pb in values] unique_meanings = set(all_meanings) + if len(unique_meanings) == 1: # If there is a unique meaning, we preserve it. - meaning = unique_meanings.pop() + return unique_meanings.pop() else: # We know len(value_pb.array_value.values) > 0. # If the meaning is not unique, just return all of them. - meaning = all_meanings + return all_meanings + elif value_pb.meaning: # Simple field (int32). - meaning = value_pb.meaning + return value_pb.meaning - return meaning + return None def _new_value_pb(entity_pb, name): @@ -89,29 +87,12 @@ def _new_value_pb(entity_pb, name): :rtype: :class:`.entity_pb2.Value` :returns: The new ``Value`` protobuf that was added to the entity. """ - properties = entity_pb.properties - try: - properties = properties._pb - except AttributeError: - # TODO(microgenerator): shouldn't need this. the issue is that - # we have wrapped and non-wrapped protos coming here. - pass + # TODO(microgenerator): shouldn't need this. the issue is that + # we have wrapped and non-wrapped protos coming here. + properties = getattr(entity_pb.properties, "_pb", entity_pb.properties) return properties.get_or_create(name) -def _property_tuples(entity_pb): - """Iterator of name, ``Value`` tuples from entity properties. - - :type entity_pb: :class:`.entity_pb2.Entity` - :param entity_pb: An entity protobuf to add a new property to. - - :rtype: :class:`generator` - :returns: An iterator that yields tuples of a name and ``Value`` - corresponding to properties on the entity. - """ - return iter(entity_pb.properties.items()) - - def entity_from_protobuf(pb): """Factory method for creating an entity based on a protobuf. 
@@ -124,21 +105,18 @@ def entity_from_protobuf(pb): :rtype: :class:`google.cloud.datastore.entity.Entity` :returns: The entity derived from the protobuf. """ - if not isinstance(pb, entity_pb2.Entity): - proto_pb = entity_pb2.Entity.wrap(pb) - else: - proto_pb = pb + if isinstance(pb, entity_pb2.Entity): pb = pb._pb key = None - if "key" in proto_pb: # Message field (Key) - key = key_from_protobuf(proto_pb.key) + if pb.HasField("key"): # Message field (Key) + key = key_from_protobuf(pb.key) entity_props = {} entity_meanings = {} exclude_from_indexes = [] - for prop_name, value_pb in _property_tuples(proto_pb._pb): + for prop_name, value_pb in pb.properties.items(): value = _get_value_from_value_pb(value_pb) entity_props[prop_name] = value @@ -384,7 +362,7 @@ def _pb_attr_value(val): return name + "_value", value -def _get_value_from_value_pb(value): +def _get_value_from_value_pb(pb): """Given a protobuf for a Value, get the correct value. The Cloud Datastore Protobuf API returns a Property Protobuf which @@ -394,56 +372,47 @@ def _get_value_from_value_pb(value): Some work is done to coerce the return value into a more useful type (particularly in the case of a timestamp value, or a key value). - :type value_pb: :class:`.entity_pb2.Value` - :param value_pb: The Value Protobuf. + :type pb: :class:`.entity_pb2.Value._pb` + :param pb: The *raw* Value Protobuf. :rtype: object :returns: The value provided by the Protobuf. :raises: :class:`ValueError ` if no value type has been set. """ - if not getattr(value, "_pb", False): - # Coerce raw pb type into proto-plus pythonic type. - value = entity_pb2.Value.wrap(value) - - value_type = value._pb.WhichOneof("value_type") + value_type = pb.WhichOneof("value_type") if value_type == "timestamp_value": - # Do not access `._pb` here, as that returns a Timestamp proto, - # but this should return a Pythonic `DatetimeWithNanoseconds` value, - # which is found at `value.timestamp_value` - result = value.timestamp_value + result = DatetimeWithNanoseconds.from_timestamp_pb(pb.timestamp_value) elif value_type == "key_value": - result = key_from_protobuf(value._pb.key_value) + result = key_from_protobuf(pb.key_value) elif value_type == "boolean_value": - result = value._pb.boolean_value + result = pb.boolean_value elif value_type == "double_value": - result = value._pb.double_value + result = pb.double_value elif value_type == "integer_value": - result = value._pb.integer_value + result = pb.integer_value elif value_type == "string_value": - result = value._pb.string_value + result = pb.string_value elif value_type == "blob_value": - result = value._pb.blob_value + result = pb.blob_value elif value_type == "entity_value": - result = entity_from_protobuf(value._pb.entity_value) + result = entity_from_protobuf(pb.entity_value) elif value_type == "array_value": result = [ - _get_value_from_value_pb(value) for value in value._pb.array_value.values + _get_value_from_value_pb(item_value) for item_value in pb.array_value.values ] elif value_type == "geo_point_value": - result = GeoPoint( - value._pb.geo_point_value.latitude, value._pb.geo_point_value.longitude, - ) + result = GeoPoint(pb.geo_point_value.latitude, pb.geo_point_value.longitude,) elif value_type == "null_value": result = None diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index 78c1db20e37b..ead00623c16c 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ 
b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -118,8 +118,6 @@ def test_put_entity_w_partial_key(self): self.assertEqual(batch._partial_key_entities, [entity]) def test_put_entity_w_completed_key(self): - from google.cloud.datastore.helpers import _property_tuples - project = "PROJECT" properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []} client = _Client(project) @@ -134,7 +132,7 @@ def test_put_entity_w_completed_key(self): mutated_entity = _mutated_pb(self, batch.mutations, "upsert") self.assertEqual(mutated_entity.key, key._key) - prop_dict = dict(_property_tuples(mutated_entity)) + prop_dict = dict(mutated_entity.properties.items()) self.assertEqual(len(prop_dict), 4) self.assertFalse(prop_dict["foo"].exclude_from_indexes) self.assertTrue(prop_dict["baz"].exclude_from_indexes) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 3c75a5fba810..5127fd60730b 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -802,7 +802,6 @@ def test_put_multi_w_single_empty_entity(self): def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self): from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.helpers import _property_tuples entity = _Entity(foo=u"bar") key = entity.key = _Key(_Key.kind, None) @@ -838,15 +837,13 @@ def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self): mutated_entity = _mutated_pb(self, mutations, "insert") self.assertEqual(mutated_entity.key, key.to_protobuf()) - prop_list = list(_property_tuples(mutated_entity)) + prop_list = list(mutated_entity.properties.items()) self.assertTrue(len(prop_list), 1) name, value_pb = prop_list[0] self.assertEqual(name, "foo") self.assertEqual(value_pb.string_value, u"bar") def test_put_multi_existing_batch_w_completed_key(self): - from google.cloud.datastore.helpers import _property_tuples - creds = _make_credentials() client = self._make_one(credentials=creds) entity = _Entity(foo=u"bar") @@ -859,7 +856,7 @@ def test_put_multi_existing_batch_w_completed_key(self): mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, "upsert") self.assertEqual(mutated_entity.key, key.to_protobuf()) - prop_list = list(_property_tuples(mutated_entity)) + prop_list = list(mutated_entity.properties.items()) self.assertTrue(len(prop_list), 1) name, value_pb = prop_list[0] self.assertEqual(name, "foo") diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 5b602cffbb00..c37499caa029 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -33,28 +33,6 @@ def test_it(self): self.assertEqual(entity_pb._pb.properties[name], result) -class Test__property_tuples(unittest.TestCase): - def _call_fut(self, entity_pb): - from google.cloud.datastore.helpers import _property_tuples - - return _property_tuples(entity_pb) - - def test_it(self): - import types - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import _new_value_pb - - entity_pb = entity_pb2.Entity() - name1 = "foo" - name2 = "bar" - val_pb1 = _new_value_pb(entity_pb, name1) - val_pb2 = _new_value_pb(entity_pb, name2) - - result = self._call_fut(entity_pb) - self.assertIsInstance(result, types.GeneratorType) - 
self.assertEqual(sorted(result), sorted([(name1, val_pb1), (name2, val_pb2)])) - - class Test_entity_from_protobuf(unittest.TestCase): def _call_fut(self, val): from google.cloud.datastore.helpers import entity_from_protobuf @@ -221,11 +199,9 @@ def _call_fut(self, entity): return entity_to_protobuf(entity) def _compare_entity_proto(self, entity_pb1, entity_pb2): - from google.cloud.datastore.helpers import _property_tuples - self.assertEqual(entity_pb1.key, entity_pb2.key) - value_list1 = sorted(_property_tuples(entity_pb1)) - value_list2 = sorted(_property_tuples(entity_pb2)) + value_list1 = sorted(entity_pb1.properties.items()) + value_list2 = sorted(entity_pb2.properties.items()) self.assertEqual(len(value_list1), len(value_list2)) for pair1, pair2 in zip(value_list1, value_list2): name1, val1 = pair1 @@ -668,12 +644,12 @@ def _call_fut(self, pb): return _get_value_from_value_pb(pb) - def _makePB(self, attr_name, value): + def _makePB(self, attr_name, attr_value): from google.cloud.datastore_v1.types import entity as entity_pb2 - pb = entity_pb2.Value() - setattr(pb, attr_name, value) - return pb + value = entity_pb2.Value() + setattr(value._pb, attr_name, attr_value) + return value def test_datetime(self): import calendar @@ -683,67 +659,67 @@ def test_datetime(self): micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - pb = entity_pb2.Value() - pb._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) - pb._pb.timestamp_value.nanos = 1000 * micros - self.assertEqual(self._call_fut(pb), utc) + value = entity_pb2.Value() + value._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) + value._pb.timestamp_value.nanos = 1000 * micros + self.assertEqual(self._call_fut(value._pb), utc) def test_key(self): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.key import Key - pb = entity_pb2.Value() + value = entity_pb2.Value() expected = Key("KIND", 1234, project="PROJECT").to_protobuf() - pb.key_value._pb.CopyFrom(expected._pb) - found = self._call_fut(pb) + value.key_value._pb.CopyFrom(expected._pb) + found = self._call_fut(value._pb) self.assertEqual(found.to_protobuf(), expected) def test_bool(self): - pb = self._makePB("boolean_value", False) - self.assertEqual(self._call_fut(pb), False) + value = self._makePB("boolean_value", False) + self.assertEqual(self._call_fut(value._pb), False) def test_float(self): - pb = self._makePB("double_value", 3.1415926) - self.assertEqual(self._call_fut(pb), 3.1415926) + value = self._makePB("double_value", 3.1415926) + self.assertEqual(self._call_fut(value._pb), 3.1415926) def test_int(self): - pb = self._makePB("integer_value", 42) - self.assertEqual(self._call_fut(pb), 42) + value = self._makePB("integer_value", 42) + self.assertEqual(self._call_fut(value._pb), 42) def test_bytes(self): - pb = self._makePB("blob_value", b"str") - self.assertEqual(self._call_fut(pb), b"str") + value = self._makePB("blob_value", b"str") + self.assertEqual(self._call_fut(value._pb), b"str") def test_unicode(self): - pb = self._makePB("string_value", u"str") - self.assertEqual(self._call_fut(pb), u"str") + value = self._makePB("string_value", u"str") + self.assertEqual(self._call_fut(value._pb), u"str") def test_entity(self): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb - pb = entity_pb2.Value() - entity_pb = pb.entity_value + value = entity_pb2.Value() + entity_pb = 
value.entity_value entity_pb._pb.key.path.add(kind="KIND") entity_pb.key.partition_id.project_id = "PROJECT" value_pb = _new_value_pb(entity_pb, "foo") value_pb.string_value = "Foo" - entity = self._call_fut(pb) + entity = self._call_fut(value._pb) self.assertIsInstance(entity, Entity) self.assertEqual(entity["foo"], "Foo") def test_array(self): from google.cloud.datastore_v1.types import entity as entity_pb2 - pb = entity_pb2.Value() - array_pb = pb.array_value.values + value = entity_pb2.Value() + array_pb = value.array_value.values item_pb = array_pb._pb.add() item_pb.string_value = "Foo" item_pb = array_pb._pb.add() item_pb.string_value = "Bar" - items = self._call_fut(pb) + items = self._call_fut(value._pb) self.assertEqual(items, ["Foo", "Bar"]) def test_geo_point(self): @@ -754,8 +730,8 @@ def test_geo_point(self): lat = -3.14 lng = 13.37 geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - pb = entity_pb2.Value(geo_point_value=geo_pt_pb) - result = self._call_fut(pb) + value = entity_pb2.Value(geo_point_value=geo_pt_pb) + result = self._call_fut(value._pb) self.assertIsInstance(result, GeoPoint) self.assertEqual(result.latitude, lat) self.assertEqual(result.longitude, lng) @@ -764,16 +740,16 @@ def test_null(self): from google.protobuf import struct_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 - pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) - result = self._call_fut(pb) + value = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) + result = self._call_fut(value._pb) self.assertIsNone(result) def test_unknown(self): from google.cloud.datastore_v1.types import entity as entity_pb2 - pb = entity_pb2.Value() + value = entity_pb2.Value() with self.assertRaises(ValueError): - self._call_fut(pb) + self._call_fut(value._pb) class Test_set_protobuf_value(unittest.TestCase): @@ -860,18 +836,16 @@ def test_unicode(self): def test_entity_empty_wo_key(self): from google.cloud.datastore.entity import Entity - from google.cloud.datastore.helpers import _property_tuples pb = self._makePB() entity = Entity() self._call_fut(pb, entity) value = pb.entity_value self.assertEqual(value.key.SerializeToString(), b"") - self.assertEqual(len(list(_property_tuples(value))), 0) + self.assertEqual(len(list(value.properties.items())), 0) def test_entity_w_key(self): from google.cloud.datastore.entity import Entity - from google.cloud.datastore.helpers import _property_tuples from google.cloud.datastore.key import Key name = "foo" @@ -884,7 +858,7 @@ def test_entity_w_key(self): entity_pb = pb.entity_value self.assertEqual(entity_pb.key, key.to_protobuf()._pb) - prop_dict = dict(_property_tuples(entity_pb)) + prop_dict = dict(entity_pb.properties.items()) self.assertEqual(len(prop_dict), 1) self.assertEqual(list(prop_dict.keys()), [name]) self.assertEqual(prop_dict[name].string_value, value) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 89bc7e2c304c..dcb4e9f53ab3 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -15,6 +15,7 @@ import unittest import mock +import pytest class TestQuery(unittest.TestCase): @@ -527,6 +528,7 @@ def test__process_query_results_done(self): self.assertIsNone(iterator.next_page_token) self.assertFalse(iterator._more_results) + @pytest.mark.filterwarnings("ignore") def test__process_query_results_bad_enum(self): iterator = self._make_one(None, None) more_results_enum = 999 
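
In user-facing terms, the patch above means ``entity_from_protobuf`` accepts either the proto-plus wrapper or the raw protobuf message, unwrapping internally before reading properties. A minimal usage sketch (the kind, id, and project name below are hypothetical, and google-cloud-datastore 2.1.x is assumed):

    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.helpers import entity_from_protobuf, entity_to_protobuf
    from google.cloud.datastore.key import Key

    entity = Entity(key=Key("Kind", 1234, project="example-project"))
    entity["foo"] = "bar"

    pb = entity_to_protobuf(entity)  # proto-plus wrapper

    # Both the wrapper and the raw message round-trip to an equivalent Entity.
    assert entity_from_protobuf(pb)["foo"] == "bar"
    assert entity_from_protobuf(pb._pb)["foo"] == "bar"
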
From 2790983c4214eae521709e3c183a93949c2e4e76 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 9 Jul 2021 18:46:23 +0000 Subject: [PATCH 341/611] chore: release 2.1.4 (#184) :robot: I have created a release \*beep\* \*boop\* --- ### [2.1.4](https://www.github.com/googleapis/python-datastore/compare/v2.1.3...v2.1.4) (2021-07-09) ### Performance Improvements * further avoid using proto-plus wrapper when unmarshalling entities ([#190](https://www.github.com/googleapis/python-datastore/issues/190)) ([d0481bf](https://www.github.com/googleapis/python-datastore/commit/d0481bf8caa84a829808e7f512fda8709f38d0cc)) ### Documentation * omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-datastore/issues/1127)) ([#181](https://www.github.com/googleapis/python-datastore/issues/181)) ([6efde70](https://www.github.com/googleapis/python-datastore/commit/6efde70db751bf708091b24a932ab8571bd981a6)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 880add6efd35..a8c636477c72 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.1.4](https://www.github.com/googleapis/python-datastore/compare/v2.1.3...v2.1.4) (2021-07-09) + + +### Performance Improvements + +* further avoid using proto-plus wrapper when unmarshalling entities ([#190](https://www.github.com/googleapis/python-datastore/issues/190)) ([d0481bf](https://www.github.com/googleapis/python-datastore/commit/d0481bf8caa84a829808e7f512fda8709f38d0cc)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-datastore/issues/1127)) ([#181](https://www.github.com/googleapis/python-datastore/issues/181)) ([6efde70](https://www.github.com/googleapis/python-datastore/commit/6efde70db751bf708091b24a932ab8571bd981a6)) + ### [2.1.3](https://www.github.com/googleapis/python-datastore/compare/v2.1.2...v2.1.3) (2021-05-25) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 28ad2844bab7..a1c746740d83 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.1.3" +__version__ = "2.1.4" From d63377300e9ff4ce299ca8ae14d4e0e8782bfa00 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 15:26:32 +0000 Subject: [PATCH 342/611] build(python): exit with success status if no samples found (#191) Source-Link: https://github.com/googleapis/synthtool/commit/53ea3896a52f87c758e79b5a19fa338c83925a98 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-datastore/.kokoro/test-samples-impl.sh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index e2b39f946040..a5d3697f2167 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 + digest: sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh index cf5de74c17a5..311a8d54b9f1 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh @@ -20,9 +20,9 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" +# Exit early if samples don't exist +if ! find samples -name 'requirements.txt' | grep -q .; then + echo "No tests run. 
'./samples/**/requirements.txt' not found" exit 0 fi From ca39b42c338f961aaf7792c29ac8c0481f15c50e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 12:44:14 -0400 Subject: [PATCH 343/611] build(python): remove python 3.7 from kokoro Dockerfile (#192) Source-Link: https://github.com/googleapis/synthtool/commit/e44dc0c742b1230887a73552357e0c18dcc30b92 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/docker/docs/Dockerfile | 35 ++----------------- 2 files changed, 3 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index a5d3697f2167..cb06536dab0b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c + digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index 412b0b56a921..4e1b1fb8b5a5 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -40,6 +40,7 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ + python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -59,40 +60,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb - -COPY fetch_gpg_keys.sh /tmp -# Install the desired versions of Python. -RUN set -ex \ - && export GNUPGHOME="$(mktemp -d)" \ - && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ - && /tmp/fetch_gpg_keys.sh \ - && for PYTHON_VERSION in 3.7.8 3.8.5; do \ - wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ - && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ - && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ - && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ - && mkdir -p /usr/src/python-${PYTHON_VERSION} \ - && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ - && rm python-${PYTHON_VERSION}.tar.xz \ - && cd /usr/src/python-${PYTHON_VERSION} \ - && ./configure \ - --enable-shared \ - # This works only on Python 2.7 and throws a warning on every other - # version, but seems otherwise harmless. 
- --enable-unicode=ucs4 \ - --with-system-ffi \ - --without-ensurepip \ - && make -j$(nproc) \ - && make install \ - && ldconfig \ - ; done \ - && rm -rf "${GNUPGHOME}" \ - && rm -rf /usr/src/python* \ - && rm -rf ~/.cache/ - RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.7 /tmp/get-pip.py \ && python3.8 /tmp/get-pip.py \ && rm /tmp/get-pip.py -CMD ["python3.7"] +CMD ["python3.8"] From 2b830e7c5190f6b914ad09c29af57c4f1b3d85e6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 16 Jul 2021 10:39:55 -0400 Subject: [PATCH 344/611] chore: pin 'google-{api,cloud}-core' to allow 2.x versions (#193) --- packages/google-cloud-datastore/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 2df2e821a081..b577da3165e5 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,8 +29,8 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", - "google-cloud-core >= 1.4.0, < 2.0dev", + "google-api-core[grpc] >= 1.22.2, < 3.0.0dev", + "google-cloud-core >= 1.4.0, < 3.0dev", "proto-plus >= 1.4.0", "libcst >= 0.2.5", ] From 576288bad76a388c063229824fd26d013a8b184f Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 20 Jul 2021 03:48:12 -0600 Subject: [PATCH 345/611] fix(deps): pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions (#194) Expand pins on library dependencies in preparation for these dependencies taking a new major version. See https://github.com/googleapis/google-cloud-python/issues/10566. 
--- packages/google-cloud-datastore/setup.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index b577da3165e5..19fe77117875 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,8 +29,14 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.2, < 3.0.0dev", - "google-cloud-core >= 1.4.0, < 3.0dev", + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-api-core[grpc] >= 1.22.2, <3.0.0dev", + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.4.0", "libcst >= 0.2.5", ] From cac7286fed0b431dd7619eacd2ce6a70225ae86f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 21 Jul 2021 16:41:17 -0600 Subject: [PATCH 346/611] chore: release 2.1.5 (#196) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index a8c636477c72..f7d6a1a68eb6 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.1.5](https://www.github.com/googleapis/python-datastore/compare/v2.1.4...v2.1.5) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#194](https://www.github.com/googleapis/python-datastore/issues/194)) ([e94f97c](https://www.github.com/googleapis/python-datastore/commit/e94f97ce42b04ba76766737eb69cdaf92bc2ac05)) + ### [2.1.4](https://www.github.com/googleapis/python-datastore/compare/v2.1.3...v2.1.4) (2021-07-09) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index a1c746740d83..54b693bd5c80 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.1.4" +__version__ = "2.1.5" From 409c3239b7492b147da30d74792e84f2ea7972f9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Jul 2021 14:10:10 +0000 Subject: [PATCH 347/611] feat: add Samples section to CONTRIBUTING.rst (#195) Source-Link: https://github.com/googleapis/synthtool/commit/52e4e46eff2a0b70e3ff5506a02929d089d077d4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 --- .../.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-datastore/CONTRIBUTING.rst | 24 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index cb06536dab0b..d57f74204625 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d + digest: sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 63d4d9de2c87..5cb48ca79f5d 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -202,6 +202,30 @@ Build the docs via: $ nox -s docs +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** From 6323f211302e95a00a0df9903cdf01d1de04001b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 23 Jul 2021 15:32:49 +0000 Subject: [PATCH 348/611] chore: fix kokoro config for samples (#201) Source-Link: https://github.com/googleapis/synthtool/commit/dd05f9d12f134871c9e45282349c9856fbebecdd Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.6/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.7/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.8/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.9/periodic-head.cfg | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index d57f74204625..9ee60f7e4850 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 + digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg index f9cfcd33e058..714045a75ed7 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" } diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg index f9cfcd33e058..714045a75ed7 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" } diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg index f9cfcd33e058..714045a75ed7 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" } diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg 
b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg index f9cfcd33e058..714045a75ed7 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" } From 3dd646eefbeab7004b7ed813407135b55de50eec Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 27 Jul 2021 16:16:24 +0000 Subject: [PATCH 349/611] chore: release 2.1.6 (#203) :robot: I have created a release \*beep\* \*boop\* --- ### [2.1.6](https://www.github.com/googleapis/python-datastore/compare/v2.1.5...v2.1.6) (2021-07-26) ### Documentation * add Samples section to CONTRIBUTING.rst ([#195](https://www.github.com/googleapis/python-datastore/issues/195)) ([f607fb5](https://www.github.com/googleapis/python-datastore/commit/f607fb544a2f7279267e5a5a534fc31e573b6b74)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/google-cloud-datastore/CHANGELOG.md | 8 ++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index f7d6a1a68eb6..91b6b8520416 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.1.6](https://www.github.com/googleapis/python-datastore/compare/v2.1.5...v2.1.6) (2021-07-26) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#195](https://www.github.com/googleapis/python-datastore/issues/195)) ([f607fb5](https://www.github.com/googleapis/python-datastore/commit/f607fb544a2f7279267e5a5a534fc31e573b6b74)) + + ### [2.1.5](https://www.github.com/googleapis/python-datastore/compare/v2.1.4...v2.1.5) (2021-07-20) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 54b693bd5c80..5c544daee541 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.1.5" +__version__ = "2.1.6" From 7526cde6e005635c74b5f388bdd2752ec447d38e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 3 Aug 2021 17:20:02 -0400 Subject: [PATCH 350/611] tests: split systests to separate kokoro job (#205) Closes #204. 
--- .../.kokoro/presubmit/presubmit.cfg | 8 +- .../.kokoro/presubmit/system-3.8.cfg | 7 ++ packages/google-cloud-datastore/noxfile.py | 1 - packages/google-cloud-datastore/owlbot.py | 86 +++++++------------ 4 files changed, 46 insertions(+), 56 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg index 8f43917d92fe..b158096f0ae2 100644 --- a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg +++ b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg @@ -1 +1,7 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file +# Format: //devtools/kokoro/config/proto/build.proto + +# Disable system tests. +env_vars: { + key: "RUN_SYSTEM_TESTS" + value: "false" +} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg new file mode 100644 index 000000000000..f4bcee3db0f0 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "system-3.8" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 4dd9e06c67a0..518a3697a0a5 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -157,7 +157,6 @@ def system(session, disable_grpc): env=env, *session.posargs, ) - if system_test_folder_exists: session.run( "py.test", diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 8017fb4a6390..03f96773018a 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -24,28 +24,28 @@ datastore_admin_default_version = "v1" for library in s.get_staging_dirs(datastore_default_version): - if library.parent.absolute() == 'datastore': + if library.parent.absolute() == "datastore": s.move(library / f"google/cloud/datastore_{library.name}") - s.move(library / f"tests/") + s.move(library / "tests/") s.move(library / "scripts") for library in s.get_staging_dirs(datastore_admin_default_version): - if library.parent.absolute() == 'datastore_admin': + if library.parent.absolute() == "datastore_admin": s.replace( library / "google/**/datastore_admin_client.py", "google-cloud-datastore-admin", - "google-cloud-datstore" + "google-cloud-datstore", ) # Remove spurious markup s.replace( "google/**/datastore_admin/client.py", - "\s+---------------------------------(-)+", - "" + r"\s+---------------------------------(-)+", + "", ) s.move(library / f"google/cloud/datastore_admin_{library.name}") - s.move(library / f"tests") + s.move(library / "tests") s.move(library / "scripts") s.remove_staging_dirs() @@ -53,16 +53,14 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library( - microgenerator=True, -) +templated_files = common.py_library(microgenerator=True, split_system_tests=True,) s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) # Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) -s.replace( +assert 1 == s.replace( 
"noxfile.py", - """\ + r"""\ @nox.session\(python=SYSTEM_TEST_PYTHON_VERSIONS\) def system\(session\): """, @@ -73,7 +71,7 @@ def system(session, disable_grpc): """, ) -s.replace( +assert 1 == s.replace( "noxfile.py", """\ # Run py.test against the system tests. @@ -83,62 +81,42 @@ def system(session, disable_grpc): if disable_grpc: env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True" - # Run py.test against the system tests. +# Run py.test against the system tests. """, ) -s.replace( +assert 1 == s.replace( "noxfile.py", - """session\.run\( - "py\.test", - "--quiet", - f"--junitxml=system_\{session\.python\}_sponge_log\.xml", - system_test_path, - \*session\.posargs - \)""", - """session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - env=env, - *session.posargs - ) + """\ + system_test_path, +""", + """\ + system_test_path, + env=env, """, ) -s.replace( +assert 1 == s.replace( "noxfile.py", - """session\.run\( - "py\.test", - "--quiet", - f"--junitxml=system_\{session\.python\}_sponge_log\.xml", - system_test_folder_path, - \*session\.posargs - \)""", - """session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - env=env, - *session.posargs - ) + """\ + system_test_folder_path, +""", + """\ + system_test_folder_path, + env=env, """, ) -s.shell.run(["nox", "-s", "blacken"], hide_output=False) - # Add documentation about creating indexes and populating data for system # tests. -num = s.replace( +assert 1 == s.replace( "CONTRIBUTING.rst", - """\ + r""" \*\*\*\*\*\*\*\*\*\*\*\*\* Test Coverage \*\*\*\*\*\*\*\*\*\*\*\*\* """, - """\ + """ - You'll need to create composite `indexes `__ with the ``gcloud`` command line @@ -168,7 +146,7 @@ def system(session, disable_grpc): ************* Test Coverage ************* -""") +""", +) -if num != 1: - raise Exception("Required replacement not made.") +s.shell.run(["nox", "-s", "blacken"], hide_output=False) From a0adcc32da31e9da0948b4408a8dfd6118f8987f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 5 Aug 2021 14:46:46 -0400 Subject: [PATCH 351/611] tests: allow prerelease deps on Python 3.9 (#207) Closes #206. 
--- packages/google-cloud-datastore/testing/constraints-3.9.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-datastore/testing/constraints-3.9.txt b/packages/google-cloud-datastore/testing/constraints-3.9.txt index e69de29bb2d1..6d34489a53a4 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.9.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.9.txt @@ -0,0 +1,2 @@ +# Allow prerelease requirements +--pre From 757c6e36f43555d7eb97f48a66616bb084097f30 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 11 Aug 2021 14:23:42 -0400 Subject: [PATCH 352/611] chore: avoid `.nox` directories when building docs (#211) Source-Link: https://github.com/googleapis/synthtool/commit/7e1f6da50524b5d98eb67adbf6dd0805df54233d Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 9ee60f7e4850..b771c37caef8 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b + digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index f1b507871245..adfa53c42084 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -110,6 +110,7 @@ # directories to ignore when looking for source files. 
exclude_patterns = [ "_build", + "**/.nox/**/*", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", "samples/snippets/README.rst", From 6596fb3eafb573b9e2a180f16a2ee47b46cdf2ba Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Aug 2021 11:26:42 -0400 Subject: [PATCH 353/611] chore: drop mention of Python 2.7 from templates (#212) Source-Link: https://github.com/googleapis/synthtool/commit/facee4cc1ea096cd8bcc008bb85929daa7c414c0 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 Co-authored-by: Owl Bot --- .../google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/noxfile.py | 12 +++++++++--- .../readme-gen/templates/install_deps.tmpl.rst | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index b771c37caef8..a9fcd07cc43b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d + digest: sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 518a3697a0a5..e7f995cf9a65 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -84,9 +84,15 @@ def default(session): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - - session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + session.install( + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", + "-c", + constraints_path, + ) session.install("-e", ".", "-c", constraints_path) diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba8c84..275d649890d7 100644 --- a/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. 
code-block:: bash From 2e7ffc065eac157915678b4e10cfa8c445f3326c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 18 Aug 2021 07:40:42 -0600 Subject: [PATCH 354/611] chore: generate python samples templates in owlbot.py (#214) Generate python samples templates in owlbot.py --- packages/google-cloud-datastore/owlbot.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 03f96773018a..460bd06f6a40 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -54,6 +54,7 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(microgenerator=True, split_system_tests=True,) +python.py_samples(skip_readmes=True) s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) From c845dddf901f715c6abf13944975c342c7c25800 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 18 Aug 2021 11:42:44 -0400 Subject: [PATCH 355/611] chore: add missing import in owlbot.py (#215) --- packages/google-cloud-datastore/owlbot.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 460bd06f6a40..0be97a88a169 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -15,6 +15,7 @@ """This script is used to synthesize generated parts of this library.""" import synthtool as s from synthtool import gcp +from synthtool.languages import python common = gcp.CommonTemplates() @@ -54,9 +55,9 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(microgenerator=True, split_system_tests=True,) -python.py_samples(skip_readmes=True) s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) +python.py_samples(skip_readmes=True) # Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) assert 1 == s.replace( From 462048145b6372ca69787ec3aa3c8ee473160106 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 18 Aug 2021 12:12:09 -0400 Subject: [PATCH 356/611] tests: revert testing against prerelease deps on Python 3.9 (#213) Reverts googleapis/python-datastore#207 Consensus from today's meeting is that testing prereleases of third-party dependencies needs to happen outside the normal `presubmit` path. --- packages/google-cloud-datastore/testing/constraints-3.9.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-datastore/testing/constraints-3.9.txt b/packages/google-cloud-datastore/testing/constraints-3.9.txt index 6d34489a53a4..e69de29bb2d1 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.9.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.9.txt @@ -1,2 +0,0 @@ -# Allow prerelease requirements ---pre From 221c0947c6a8bf163231f42bcfb2ccd66c2bd06f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 19 Aug 2021 14:56:58 -0400 Subject: [PATCH 357/611] tests: refactor systests to use pytest fixtures / idioms (#210) Use pytest fixtures / idioms, rather than legacy setup / teardown. Also, remove fossilized doctest setup code: Closes #208. 
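The shape of the refactor below, in miniature: the module-level setUpModule/tearDownModule pair and the mutable ``Config`` holder give way to a session-scoped client fixture plus a function-scoped ``entities_to_delete`` fixture whose post-yield code replaces tearDown. A condensed sketch using the names from the new conftest.py (emulator and namespace handling omitted; the sample test body is invented for illustration):

    import pytest

    from google.cloud import datastore


    @pytest.fixture(scope="session")
    def datastore_client():
        # One client per test session, as in the new conftest.py (the
        # real fixture also handles the emulator and a unique namespace).
        return datastore.Client(namespace="ns-example")  # hypothetical namespace


    @pytest.fixture(scope="function")
    def entities_to_delete(datastore_client):
        doomed = []
        yield doomed  # the test body runs here and appends entities
        # Post-yield cleanup replaces tearDown: delete what was registered.
        with datastore_client.transaction():
            datastore_client.delete_multi(doomed)


    def test_put_roundtrip(datastore_client, entities_to_delete):
        entity = datastore.Entity(key=datastore_client.key("Kind"))
        entity["foo"] = 42
        datastore_client.put(entity)
        entities_to_delete.append(entity)  # cleaned up automatically
        assert datastore_client.get(entity.key) == entity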
--- .../google/cloud/datastore/client.py | 22 - .../google/cloud/datastore/entity.py | 17 - .../google/cloud/datastore/transaction.py | 36 - .../tests/system/_helpers.py | 44 ++ .../tests/system/conftest.py | 50 ++ .../tests/system/test_allocate_reserve_ids.py | 61 ++ .../tests/system/test_put.py | 163 +++++ .../tests/system/test_query.py | 324 +++++++++ .../tests/system/test_system.py | 636 ------------------ .../tests/system/test_transaction.py | 106 +++ .../tests/system/utils/populate_datastore.py | 20 +- 11 files changed, 759 insertions(+), 720 deletions(-) create mode 100644 packages/google-cloud-datastore/tests/system/_helpers.py create mode 100644 packages/google-cloud-datastore/tests/system/conftest.py create mode 100644 packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py create mode 100644 packages/google-cloud-datastore/tests/system/test_put.py create mode 100644 packages/google-cloud-datastore/tests/system/test_query.py delete mode 100644 packages/google-cloud-datastore/tests/system/test_system.py create mode 100644 packages/google-cloud-datastore/tests/system/test_transaction.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 28d968ce0a02..bacb33535555 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -784,28 +784,6 @@ def do_something(entity): or manually page through results - .. testsetup:: query-page - - import os - import uuid - - from google.cloud import datastore - from tests.system.test_system import Config # system tests - - unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) - client = datastore.Client(namespace='ns{}'.format(unique)) - - key = client.key('_Doctest') - entity1 = datastore.Entity(key=key) - entity1['foo'] = 1337 - entity2 = datastore.Entity(key=key) - entity2['foo'] = 42 - Config.TO_DELETE.extend([entity1, entity2]) - client.put_multi([entity1, entity2]) - - query = client.query(kind='_Doctest') - cursor = None - .. doctest:: query-page >>> query_iter = query.fetch(start_cursor=cursor) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index c317bdda85fd..7353d210cb8d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -40,23 +40,6 @@ class Entity(dict): Use :meth:`~google.cloud.datastore.client.Client.get` to retrieve an existing entity: - .. testsetup:: entity-ctor - - import os - import uuid - - from google.cloud import datastore - from tests.system.test_system import Config # system tests - - unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) - client = datastore.Client(namespace='ns{}'.format(unique)) - key = client.key('EntityKind', 1234, namespace='_Doctest') - entity = datastore.Entity(key=key) - entity['property'] = 'value' - Config.TO_DELETE.append(entity) - - client.put(entity) - .. 
doctest:: entity-ctor >>> client.get(key) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 21cac1a717a8..59d2ad9bf574 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -41,26 +41,6 @@ class Transaction(Batch): operations (either ``insert`` or ``upsert``) into the same mutation, and execute those within a transaction: - .. testsetup:: txn-put-multi, txn-api - - import os - import uuid - - from google.cloud import datastore - from tests.system.test_system import Config # system tests - - unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) - client = datastore.Client(namespace='ns{}'.format(unique)) - key1 = client.key('_Doctest') - entity1 = datastore.Entity(key=key1) - entity1['foo'] = 1337 - - key2 = client.key('_Doctest', 'abcd1234') - entity2 = datastore.Entity(key=key2) - entity2['foo'] = 42 - - Config.TO_DELETE.extend([entity1, entity2]) - .. doctest:: txn-put-multi >>> with client.transaction(): @@ -112,22 +92,6 @@ class SomeException(Exception): entities will not be available at save time! That means, if you try: - .. testsetup:: txn-entity-key, txn-entity-key-after, txn-manual - - import os - import uuid - - from google.cloud import datastore - from tests.system.test_system import Config # system tests - - unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) - client = datastore.Client(namespace='ns{}'.format(unique)) - - def Entity(*args, **kwargs): - entity = datastore.Entity(*args, **kwargs) - Config.TO_DELETE.append(entity) - return entity - .. doctest:: txn-entity-key >>> with client.transaction(): diff --git a/packages/google-cloud-datastore/tests/system/_helpers.py b/packages/google-cloud-datastore/tests/system/_helpers.py new file mode 100644 index 000000000000..e8b5cf1cb391 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/_helpers.py @@ -0,0 +1,44 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +from google.cloud import datastore +from google.cloud.datastore.client import DATASTORE_DATASET +from test_utils.system import unique_resource_id + +EMULATOR_DATASET = os.getenv(DATASTORE_DATASET) + + +def unique_id(prefix, separator="-"): + return f"{prefix}{unique_resource_id(separator)}" + + +_SENTINEL = object() + + +def clone_client(base_client, namespace=_SENTINEL): + if namespace is _SENTINEL: + namespace = base_client.namespace + + kwargs = {} + if EMULATOR_DATASET is None: + kwargs["credentials"] = base_client._credentials + + return datastore.Client( + project=base_client.project, + namespace=namespace, + _http=base_client._http, + **kwargs, + ) diff --git a/packages/google-cloud-datastore/tests/system/conftest.py b/packages/google-cloud-datastore/tests/system/conftest.py new file mode 100644 index 000000000000..61f8c1f02309 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/conftest.py @@ -0,0 +1,50 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import requests + +from google.cloud import datastore +from . import _helpers + + +@pytest.fixture(scope="session") +def in_emulator(): + return _helpers.EMULATOR_DATASET is not None + + +@pytest.fixture(scope="session") +def test_namespace(): + return _helpers.unique_id("ns") + + +@pytest.fixture(scope="session") +def datastore_client(test_namespace): + if _helpers.EMULATOR_DATASET is not None: + http = requests.Session() # Un-authorized. + return datastore.Client( + project=_helpers.EMULATOR_DATASET, namespace=test_namespace, _http=http, + ) + else: + return datastore.Client(namespace=test_namespace) + + +@pytest.fixture(scope="function") +def entities_to_delete(datastore_client): + entities_to_delete = [] + + yield entities_to_delete + + with datastore_client.transaction(): + datastore_client.delete_multi(entities_to_delete) diff --git a/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py b/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py new file mode 100644 index 000000000000..8c40538f0bac --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py @@ -0,0 +1,61 @@ +# Copyright 2011 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import warnings + + +def test_client_allocate_ids(datastore_client): + num_ids = 10 + allocated_keys = datastore_client.allocate_ids( + datastore_client.key("Kind"), num_ids, + ) + assert len(allocated_keys) == num_ids + + unique_ids = set() + for key in allocated_keys: + unique_ids.add(key.id) + assert key.name is None + assert key.id is not None + + assert len(unique_ids) == num_ids + + +def test_client_reserve_ids_sequential(datastore_client): + num_ids = 10 + key = datastore_client.key("Kind", 1234) + + # Smoke test to make sure it doesn't blow up. No return value or + # verifiable side effect to verify. + datastore_client.reserve_ids_sequential(key, num_ids) + + +def test_client_reserve_ids_deprecated(datastore_client): + num_ids = 10 + key = datastore_client.key("Kind", 1234) + + with warnings.catch_warnings(record=True) as warned: + datastore_client.reserve_ids(key, num_ids) + + assert len(warned) == 1 + assert warned[0].category is DeprecationWarning + assert "reserve_ids_sequential" in str(warned[0].message) + + +def test_client_reserve_ids_multi(datastore_client): + key1 = datastore_client.key("Kind", 1234) + key2 = datastore_client.key("Kind", 1235) + + # Smoke test to make sure it doesn't blow up. No return value or + # verifiable side effect to verify. + datastore_client.reserve_ids_multi([key1, key2]) diff --git a/packages/google-cloud-datastore/tests/system/test_put.py b/packages/google-cloud-datastore/tests/system/test_put.py new file mode 100644 index 000000000000..5e884cf3151a --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/test_put.py @@ -0,0 +1,163 @@ +# Copyright 2011 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import pytest + +from google.cloud._helpers import UTC +from google.cloud import datastore +from google.cloud.datastore.helpers import GeoPoint + +from . import _helpers + + +def parent_key(datastore_client): + return datastore_client.key("Blog", "PizzaMan") + + +def _get_post(datastore_client, id_or_name=None, post_content=None): + post_content = post_content or { + "title": u"How to make the perfect pizza in your grill", + "tags": [u"pizza", u"grill"], + "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC), + "author": u"Silvano", + "isDraft": False, + "wordCount": 400, + "rating": 5.0, + } + # Create an entity with the given content. + # NOTE: Using a parent to ensure consistency for query + # in `test_empty_kind`. + key = datastore_client.key("Post", parent=parent_key(datastore_client)) + entity = datastore.Entity(key=key) + entity.update(post_content) + + # Update the entity key. 
+ if id_or_name is not None: + entity.key = entity.key.completed_key(id_or_name) + + return entity + + +@pytest.mark.parametrize( + "name,key_id", [(None, None), ("post1", None), (None, 123456789)] +) +def test_client_put(datastore_client, entities_to_delete, name, key_id): + entity = _get_post(datastore_client, id_or_name=(name or key_id)) + datastore_client.put(entity) + entities_to_delete.append(entity) + + if name is not None: + assert entity.key.name == name + if key_id is not None: + assert entity.key.id == key_id + + retrieved_entity = datastore_client.get(entity.key) + # Check the given and retrieved are the the same. + assert retrieved_entity == entity + + +def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete): + with datastore_client.transaction() as xact: + entity1 = _get_post(datastore_client) + xact.put(entity1) + # Register entity to be deleted. + entities_to_delete.append(entity1) + + second_post_content = { + "title": u"How to make the perfect homemade pasta", + "tags": [u"pasta", u"homemade"], + "publishedAt": datetime.datetime(2001, 1, 1), + "author": u"Silvano", + "isDraft": False, + "wordCount": 450, + "rating": 4.5, + } + entity2 = _get_post(datastore_client, post_content=second_post_content,) + xact.put(entity2) + # Register entity to be deleted. + entities_to_delete.append(entity2) + + keys = [entity1.key, entity2.key] + matches = datastore_client.get_multi(keys) + assert len(matches) == 2 + + +def test_client_query_w_empty_kind(datastore_client): + query = datastore_client.query(kind="Post") + query.ancestor = parent_key(datastore_client) + posts = query.fetch(limit=2) + assert list(posts) == [] + + +def test_client_put_w_all_value_types(datastore_client, entities_to_delete): + key = datastore_client.key("TestPanObject", 1234) + entity = datastore.Entity(key=key) + entity["timestamp"] = datetime.datetime(2014, 9, 9, tzinfo=UTC) + key_stored = datastore_client.key("SavedKey", "right-here") + entity["key"] = key_stored + entity["truthy"] = True + entity["float"] = 2.718281828 + entity["int"] = 3735928559 + entity["words"] = u"foo" + entity["blob"] = b"seekretz" + entity_stored = datastore.Entity(key=key_stored) + entity_stored["hi"] = "bye" + entity["nested"] = entity_stored + entity["items"] = [1, 2, 3] + entity["geo"] = GeoPoint(1.0, 2.0) + entity["nothing_here"] = None + + # Store the entity. + datastore_client.put(entity) + entities_to_delete.append(entity) + + # Check the original and retrieved are the the same. + retrieved_entity = datastore_client.get(entity.key) + assert retrieved_entity == entity + + +def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_delete): + parent_key = datastore_client.key("Residence", "NewYork") + key = datastore_client.key("Person", "name", parent=parent_key) + entity = datastore.Entity(key=key) + entity["fullName"] = u"Full name" + entity["linkedTo"] = key # Self reference. + + datastore_client.put(entity) + entities_to_delete.append(entity) + + query = datastore_client.query(kind="Person") + # Adding ancestor to ensure consistency. 
+ query.ancestor = parent_key + query.add_filter("linkedTo", "=", key) + + stored_persons = list(query.fetch(limit=2)) + assert stored_persons == [entity] + + +def test_client_put_w_empty_array(datastore_client, entities_to_delete): + local_client = _helpers.clone_client(datastore_client) + + key = local_client.key("EmptyArray", 1234) + local_client = datastore.Client() + entity = datastore.Entity(key=key) + entity["children"] = [] + local_client.put(entity) + entities_to_delete.append(entity) + + retrieved = local_client.get(entity.key) + + assert entity["children"] == retrieved["children"] diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py new file mode 100644 index 000000000000..53ae3e4fb3d0 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -0,0 +1,324 @@ +# Copyright 2011 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from .utils import clear_datastore +from .utils import populate_datastore +from . import _helpers + + +@pytest.fixture(scope="session") +def query_client(datastore_client): + return _helpers.clone_client(datastore_client, namespace=None) + + +@pytest.fixture(scope="session") +def ancestor_key(query_client, in_emulator): + + # In the emulator, re-populating the datastore is cheap. + if in_emulator: + populate_datastore.add_characters(client=query_client) + + ancestor_key = query_client.key(*populate_datastore.ANCESTOR) + + yield ancestor_key + + # In the emulator, destroy the query entities. + if in_emulator: + clear_datastore.remove_all_entities(client=query_client) + + +def _make_ancestor_query(query_client, ancestor_key): + return query_client.query(kind="Character", ancestor=ancestor_key) + + +@pytest.fixture(scope="function") +def ancestor_query(query_client, ancestor_key): + return _make_ancestor_query(query_client, ancestor_key) + + +def test_query_w_ancestor(ancestor_query): + query = ancestor_query + expected_matches = 8 + + # We expect 8, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + + assert len(entities) == expected_matches + + +def test_query_w_limit_paging(ancestor_query): + query = ancestor_query + limit = 5 + + # Fetch characters. + iterator = query.fetch(limit=limit) + page = next(iterator.pages) + character_entities = list(page) + cursor = iterator.next_page_token + assert len(character_entities) == limit + + # Check cursor after fetch. + assert cursor is not None + + # Fetch remaining characters. + new_character_entities = list(query.fetch(start_cursor=cursor)) + characters_remaining = len(populate_datastore.CHARACTERS) - limit + assert len(new_character_entities) == characters_remaining + + +def test_query_w_simple_filter(ancestor_query): + query = ancestor_query + query.add_filter("appearances", ">=", 20) + expected_matches = 6 + + # We expect 6, but allow the query to get 1 extra. 
+ entities = list(query.fetch(limit=expected_matches + 1)) + + assert len(entities) == expected_matches + + +def test_query_w_multiple_filters(ancestor_query): + query = ancestor_query + query.add_filter("appearances", ">=", 26) + query = query.add_filter("family", "=", "Stark") + expected_matches = 4 + + # We expect 4, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + + assert len(entities) == expected_matches + + +def test_query_key_filter(query_client, ancestor_query): + # Use the client for this test instead of the global. + query = ancestor_query + rickard_key = query_client.key(*populate_datastore.RICKARD) + query.key_filter(rickard_key) + expected_matches = 1 + + # We expect 1, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + + assert len(entities) == expected_matches + + +def test_query_w_order(ancestor_query): + query = ancestor_query + query.order = "appearances" + expected_matches = 8 + + # We expect 8, but allow the query to get 1 extra. + entities = list(query.fetch(limit=expected_matches + 1)) + + assert len(entities) == expected_matches + + # Actually check the ordered data returned. + assert entities[0]["name"] == populate_datastore.CHARACTERS[0]["name"] + assert entities[7]["name"] == populate_datastore.CHARACTERS[3]["name"] + + +def test_query_w_projection(ancestor_query): + filtered_query = ancestor_query + filtered_query.projection = ["name", "family"] + filtered_query.order = ["name", "family"] + + # NOTE: There are 9 responses because of Catelyn. She has both + # Stark and Tully as her families, hence occurs twice in + # the results. + expected_matches = 9 + + # We expect 9, but allow the query to get 1 extra. + entities = list(filtered_query.fetch(limit=expected_matches + 1)) + assert len(entities) == expected_matches + + arya_entity = entities[0] + catelyn_stark_entity = entities[2] + catelyn_tully_entity = entities[3] + sansa_entity = entities[8] + + assert dict(arya_entity) == {"name": "Arya", "family": "Stark"} + + # Check both Catelyn keys are the same. 
+ assert catelyn_stark_entity.key == catelyn_tully_entity.key + assert dict(catelyn_stark_entity) == {"name": "Catelyn", "family": "Stark"} + assert dict(catelyn_tully_entity) == {"name": "Catelyn", "family": "Tully"} + + assert dict(sansa_entity) == {"name": "Sansa", "family": "Stark"} + + +def test_query_w_paginate_simple_uuid_keys(query_client): + + # See issue #4264 + page_query = query_client.query(kind="uuid_key") + iterator = page_query.fetch() + seen = set() + page_count = 0 + + for page in iterator.pages: + page_count += 1 + for entity in page: + uuid_str = entity.key.name + assert uuid_str not in seen + seen.add(uuid_str) + + assert page_count > 1 + + +def test_query_paginate_simple_timestamp_keys(query_client): + + # See issue #4264 + page_query = query_client.query(kind="timestamp_key") + iterator = page_query.fetch() + seen = set() + page_count = 0 + + for page in iterator.pages: + page_count += 1 + for entity in page: + timestamp = entity.key.id + assert timestamp not in seen + seen.add(timestamp) + + assert page_count > 1 + + +def test_query_w_offset_w_timestamp_keys(query_client): + # See issue #4675 + max_all = 10000 + offset = 1 + max_offset = max_all - offset + query = query_client.query(kind="timestamp_key") + + all_w_limit = list(query.fetch(limit=max_all)) + assert len(all_w_limit) == max_all + + offset_w_limit = list(query.fetch(offset=offset, limit=max_offset)) + assert offset_w_limit == all_w_limit[offset:] + + +def test_query_paginate_with_offset(ancestor_query): + page_query = ancestor_query + page_query.order = "appearances" + offset = 2 + limit = 3 + + iterator = page_query.fetch(limit=limit, offset=offset) + + # Fetch characters. + page = next(iterator.pages) + entities = list(page) + assert len(entities) == limit + assert entities[0]["name"] == "Robb" + assert entities[1]["name"] == "Bran" + assert entities[2]["name"] == "Catelyn" + + cursor = iterator.next_page_token + + # Fetch next set of characters. + new_iterator = page_query.fetch(limit=limit, offset=0, start_cursor=cursor) + + entities = list(new_iterator) + assert len(entities) == limit + assert entities[0]["name"] == "Sansa" + assert entities[1]["name"] == "Jon Snow" + assert entities[2]["name"] == "Arya" + + +def test_query_paginate_with_start_cursor(query_client, ancestor_key): + # Don't use fixture, because we need to create a clean copy later. + page_query = _make_ancestor_query(query_client, ancestor_key) + page_query.order = "appearances" + limit = 3 + offset = 2 + + iterator = page_query.fetch(limit=limit, offset=offset) + + # Fetch characters. + page = next(iterator.pages) + entities = list(page) + assert len(entities) == limit + + cursor = iterator.next_page_token + + # Use cursor to create a fresh query. + fresh_query = _make_ancestor_query(query_client, ancestor_key) + fresh_query.order = "appearances" + + new_entities = list(fresh_query.fetch(start_cursor=cursor, limit=limit)) + + characters_remaining = len(populate_datastore.CHARACTERS) - limit - offset + assert len(new_entities) == characters_remaining + assert new_entities[0]["name"] == "Sansa" + assert new_entities[2]["name"] == "Arya" + + +def test_query_distinct_on(ancestor_query): + query = ancestor_query + query.distinct_on = ["alive"] + expected_matches = 2 + + # We expect 2, but allow the query to get 1 extra. 
+ entities = list(query.fetch(limit=expected_matches + 1)) + + assert len(entities) == expected_matches + assert entities[0]["name"] == "Catelyn" + assert entities[1]["name"] == "Arya" + + +@pytest.fixture(scope="session") +def large_query_client(datastore_client): + large_query_client = _helpers.clone_client( + datastore_client, namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, + ) + # Populate the datastore if necessary. + populate_datastore.add_large_character_entities(client=large_query_client) + + return large_query_client + + +@pytest.fixture(scope="function") +def large_query(large_query_client): + # Use the client for this test instead of the global. + return large_query_client.query( + kind=populate_datastore.LARGE_CHARACTER_KIND, + namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, + ) + + +@pytest.mark.parametrize( + "limit,offset,expected", + [ + # with no offset there are the correct # of results + (None, None, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS,), + # with no limit there are results (offset provided) + (None, 900, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900,), + # Offset beyond items larger: verify 200 items found + (200, 1100, 200,), + # offset within range, expect 50 despite larger limit") + (100, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 50, 50), + # Offset beyond items larger Verify no items found") + (200, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS + 1000, 0), + ], +) +def test_large_query(large_query, limit, offset, expected): + page_query = large_query + page_query.add_filter("family", "=", "Stark") + page_query.add_filter("alive", "=", False) + + iterator = page_query.fetch(limit=limit, offset=offset) + + entities = [e for e in iterator] + assert len(entities) == expected diff --git a/packages/google-cloud-datastore/tests/system/test_system.py b/packages/google-cloud-datastore/tests/system/test_system.py deleted file mode 100644 index a91b99ae89f9..000000000000 --- a/packages/google-cloud-datastore/tests/system/test_system.py +++ /dev/null @@ -1,636 +0,0 @@ -# Copyright 2014 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import os -import unittest -import warnings - -import requests - -from google.cloud._helpers import UTC -from google.cloud import datastore -from google.cloud.datastore.helpers import GeoPoint -from google.cloud.datastore.client import DATASTORE_DATASET -from google.cloud.exceptions import Conflict - -from test_utils.system import unique_resource_id - -from tests.system.utils import clear_datastore -from tests.system.utils import populate_datastore - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. 
- """ - - CLIENT = None - TO_DELETE = [] - - -def clone_client(client): - emulator_dataset = os.getenv(DATASTORE_DATASET) - - if emulator_dataset is None: - return datastore.Client( - project=client.project, - namespace=client.namespace, - credentials=client._credentials, - _http=client._http, - ) - else: - return datastore.Client( - project=client.project, namespace=client.namespace, _http=client._http, - ) - - -def setUpModule(): - emulator_dataset = os.getenv(DATASTORE_DATASET) - # Isolated namespace so concurrent test runs don't collide. - test_namespace = "ns" + unique_resource_id() - if emulator_dataset is None: - Config.CLIENT = datastore.Client(namespace=test_namespace) - else: - http = requests.Session() # Un-authorized. - Config.CLIENT = datastore.Client( - project=emulator_dataset, namespace=test_namespace, _http=http, - ) - - -def tearDownModule(): - with Config.CLIENT.transaction(): - Config.CLIENT.delete_multi(Config.TO_DELETE) - - -class TestDatastore(unittest.TestCase): - def setUp(self): - self.case_entities_to_delete = [] - - def tearDown(self): - with Config.CLIENT.transaction(): - Config.CLIENT.delete_multi(self.case_entities_to_delete) - - -class TestDatastoreAllocateIDs(TestDatastore): - def test_allocate_ids(self): - num_ids = 10 - allocated_keys = Config.CLIENT.allocate_ids(Config.CLIENT.key("Kind"), num_ids) - self.assertEqual(len(allocated_keys), num_ids) - - unique_ids = set() - for key in allocated_keys: - unique_ids.add(key.id) - self.assertIsNone(key.name) - self.assertNotEqual(key.id, None) - - self.assertEqual(len(unique_ids), num_ids) - - -class TestDatastoreReserveIDs(TestDatastore): - def test_reserve_ids_sequential(self): - # Smoke test to make sure it doesn't blow up. No return value or - # verifiable side effect to verify. - num_ids = 10 - Config.CLIENT.reserve_ids_sequential(Config.CLIENT.key("Kind", 1234), num_ids) - - def test_reserve_ids(self): - with warnings.catch_warnings(record=True) as warned: - num_ids = 10 - Config.CLIENT.reserve_ids(Config.CLIENT.key("Kind", 1234), num_ids) - - warned = [ - warning - for warning in warned - if "reserve_ids_sequential" in str(warning.message) - ] - assert len(warned) == 1 - - def test_reserve_ids_multi(self): - # Smoke test to make sure it doesn't blow up. No return value or - # verifiable side effect to verify. - keys = [Config.CLIENT.key("KIND", 1234), Config.CLIENT.key("KIND", 1235)] - Config.CLIENT.reserve_ids_multi(keys) - - -class TestDatastoreSave(TestDatastore): - @classmethod - def setUpClass(cls): - cls.PARENT = Config.CLIENT.key("Blog", "PizzaMan") - - def _get_post(self, id_or_name=None, post_content=None): - post_content = post_content or { - "title": u"How to make the perfect pizza in your grill", - "tags": [u"pizza", u"grill"], - "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC), - "author": u"Silvano", - "isDraft": False, - "wordCount": 400, - "rating": 5.0, - } - # Create an entity with the given content. - # NOTE: Using a parent to ensure consistency for query - # in `test_empty_kind`. - key = Config.CLIENT.key("Post", parent=self.PARENT) - entity = datastore.Entity(key=key) - entity.update(post_content) - - # Update the entity key. - if id_or_name is not None: - entity.key = entity.key.completed_key(id_or_name) - - return entity - - def _generic_test_post(self, name=None, key_id=None): - entity = self._get_post(id_or_name=(name or key_id)) - Config.CLIENT.put(entity) - - # Register entity to be deleted. 
- self.case_entities_to_delete.append(entity) - - if name is not None: - self.assertEqual(entity.key.name, name) - if key_id is not None: - self.assertEqual(entity.key.id, key_id) - retrieved_entity = Config.CLIENT.get(entity.key) - # Check the given and retrieved are the the same. - self.assertEqual(retrieved_entity, entity) - - def test_post_with_name(self): - self._generic_test_post(name="post1") - - def test_post_with_id(self): - self._generic_test_post(key_id=123456789) - - def test_post_with_generated_id(self): - self._generic_test_post() - - def test_save_multiple(self): - with Config.CLIENT.transaction() as xact: - entity1 = self._get_post() - xact.put(entity1) - # Register entity to be deleted. - self.case_entities_to_delete.append(entity1) - - second_post_content = { - "title": u"How to make the perfect homemade pasta", - "tags": [u"pasta", u"homemade"], - "publishedAt": datetime.datetime(2001, 1, 1), - "author": u"Silvano", - "isDraft": False, - "wordCount": 450, - "rating": 4.5, - } - entity2 = self._get_post(post_content=second_post_content) - xact.put(entity2) - # Register entity to be deleted. - self.case_entities_to_delete.append(entity2) - - keys = [entity1.key, entity2.key] - matches = Config.CLIENT.get_multi(keys) - self.assertEqual(len(matches), 2) - - def test_empty_kind(self): - query = Config.CLIENT.query(kind="Post") - query.ancestor = self.PARENT - posts = list(query.fetch(limit=2)) - self.assertEqual(posts, []) - - def test_all_value_types(self): - key = Config.CLIENT.key("TestPanObject", 1234) - entity = datastore.Entity(key=key) - entity["timestamp"] = datetime.datetime(2014, 9, 9, tzinfo=UTC) - key_stored = Config.CLIENT.key("SavedKey", "right-here") - entity["key"] = key_stored - entity["truthy"] = True - entity["float"] = 2.718281828 - entity["int"] = 3735928559 - entity["words"] = u"foo" - entity["blob"] = b"seekretz" - entity_stored = datastore.Entity(key=key_stored) - entity_stored["hi"] = "bye" - entity["nested"] = entity_stored - entity["items"] = [1, 2, 3] - entity["geo"] = GeoPoint(1.0, 2.0) - entity["nothing_here"] = None - - # Store the entity. - self.case_entities_to_delete.append(entity) - Config.CLIENT.put(entity) - - # Check the original and retrieved are the the same. - retrieved_entity = Config.CLIENT.get(entity.key) - self.assertEqual(retrieved_entity, entity) - - -class TestDatastoreSaveKeys(TestDatastore): - def test_save_key_self_reference(self): - parent_key = Config.CLIENT.key("Residence", "NewYork") - key = Config.CLIENT.key("Person", "name", parent=parent_key) - entity = datastore.Entity(key=key) - entity["fullName"] = u"Full name" - entity["linkedTo"] = key # Self reference. - - Config.CLIENT.put(entity) - self.case_entities_to_delete.append(entity) - - query = Config.CLIENT.query(kind="Person") - # Adding ancestor to ensure consistency. - query.ancestor = parent_key - query.add_filter("linkedTo", "=", key) - - stored_persons = list(query.fetch(limit=2)) - self.assertEqual(stored_persons, [entity]) - - -class TestDatastoreQuery(TestDatastore): - @classmethod - def setUpClass(cls): - cls.CLIENT = clone_client(Config.CLIENT) - # Remove the namespace from the cloned client, since these - # query tests rely on the entities to be already stored and indexed, - # hence ``test_namespace`` set at runtime can't be used. - cls.CLIENT.namespace = None - - # In the emulator, re-populating the datastore is cheap. - if os.getenv(DATASTORE_DATASET) is not None: - # Populate the datastore with the cloned client. 
- populate_datastore.add_characters(client=cls.CLIENT) - - cls.CHARACTERS = populate_datastore.CHARACTERS - # Use the client for this test instead of the global. - cls.ANCESTOR_KEY = cls.CLIENT.key(*populate_datastore.ANCESTOR) - - @classmethod - def tearDownClass(cls): - # In the emulator, destroy the query entities. - if os.getenv(DATASTORE_DATASET) is not None: - # Use the client for this test instead of the global. - clear_datastore.remove_all_entities(client=cls.CLIENT) - - def _base_query(self): - # Use the client for this test instead of the global. - return self.CLIENT.query(kind="Character", ancestor=self.ANCESTOR_KEY) - - def test_limit_queries(self): - limit = 5 - query = self._base_query() - - # Fetch characters. - iterator = query.fetch(limit=limit) - page = next(iterator.pages) - character_entities = list(page) - cursor = iterator.next_page_token - self.assertEqual(len(character_entities), limit) - - # Check cursor after fetch. - self.assertIsNotNone(cursor) - - # Fetch remaining characters. - new_character_entities = list(query.fetch(start_cursor=cursor)) - characters_remaining = len(self.CHARACTERS) - limit - self.assertEqual(len(new_character_entities), characters_remaining) - - def test_query_simple_filter(self): - query = self._base_query() - query.add_filter("appearances", ">=", 20) - expected_matches = 6 - # We expect 6, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - def test_query_multiple_filters(self): - query = self._base_query() - query.add_filter("appearances", ">=", 26) - query.add_filter("family", "=", "Stark") - expected_matches = 4 - # We expect 4, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - def test_ancestor_query(self): - filtered_query = self._base_query() - - expected_matches = 8 - # We expect 8, but allow the query to get 1 extra. - entities = list(filtered_query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - def test_query_key_filter(self): - # Use the client for this test instead of the global. - rickard_key = self.CLIENT.key(*populate_datastore.RICKARD) - - query = self._base_query() - query.key_filter(rickard_key) - expected_matches = 1 - # We expect 1, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - def test_ordered_query(self): - query = self._base_query() - query.order = "appearances" - expected_matches = 8 - # We expect 8, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - # Actually check the ordered data returned. - self.assertEqual(entities[0]["name"], self.CHARACTERS[0]["name"]) - self.assertEqual(entities[7]["name"], self.CHARACTERS[3]["name"]) - - def test_projection_query(self): - filtered_query = self._base_query() - filtered_query.projection = ["name", "family"] - filtered_query.order = ["name", "family"] - - # NOTE: There are 9 responses because of Catelyn. She has both - # Stark and Tully as her families, hence occurs twice in - # the results. - expected_matches = 9 - # We expect 9, but allow the query to get 1 extra. 
- entities = list(filtered_query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - arya_entity = entities[0] - catelyn_tully_entity = entities[3] - sansa_entity = entities[8] - - arya_dict = dict(arya_entity) - self.assertEqual(arya_dict, {"name": "Arya", "family": "Stark"}) - - catelyn_stark_entity = entities[2] - catelyn_stark_dict = dict(catelyn_stark_entity) - self.assertEqual(catelyn_stark_dict, {"name": "Catelyn", "family": "Stark"}) - - catelyn_tully_dict = dict(catelyn_tully_entity) - self.assertEqual(catelyn_tully_dict, {"name": "Catelyn", "family": "Tully"}) - - # Check both Catelyn keys are the same. - self.assertEqual(catelyn_stark_entity.key, catelyn_tully_entity.key) - - sansa_dict = dict(sansa_entity) - self.assertEqual(sansa_dict, {"name": "Sansa", "family": "Stark"}) - - def test_query_paginate_simple_uuid_keys(self): - - # See issue #4264 - page_query = self.CLIENT.query(kind="uuid_key") - iterator = page_query.fetch() - - seen = set() - page_count = 0 - for page in iterator.pages: - page_count += 1 - for entity in page: - uuid_str = entity.key.name - self.assertNotIn(uuid_str, seen, uuid_str) - seen.add(uuid_str) - - self.assertTrue(page_count > 1) - - def test_query_paginate_simple_timestamp_keys(self): - - # See issue #4264 - page_query = self.CLIENT.query(kind="timestamp_key") - iterator = page_query.fetch() - - seen = set() - page_count = 0 - for page in iterator.pages: - page_count += 1 - for entity in page: - timestamp = entity.key.id - self.assertNotIn(timestamp, seen, timestamp) - seen.add(timestamp) - - self.assertTrue(page_count > 1) - - def test_query_offset_timestamp_keys(self): - # See issue #4675 - max_all = 10000 - offset = 1 - max_offset = max_all - offset - query = self.CLIENT.query(kind="timestamp_key") - all_w_limit = list(query.fetch(limit=max_all)) - self.assertEqual(len(all_w_limit), max_all) - - offset_w_limit = list(query.fetch(offset=offset, limit=max_offset)) - self.assertEqual(offset_w_limit, all_w_limit[offset:]) - - def test_query_paginate_with_offset(self): - page_query = self._base_query() - page_query.order = "appearances" - offset = 2 - limit = 3 - iterator = page_query.fetch(limit=limit, offset=offset) - - # Fetch characters. - page = next(iterator.pages) - entities = list(page) - cursor = iterator.next_page_token - self.assertEqual(len(entities), limit) - self.assertEqual(entities[0]["name"], "Robb") - self.assertEqual(entities[1]["name"], "Bran") - self.assertEqual(entities[2]["name"], "Catelyn") - - # Fetch next set of characters. - new_iterator = page_query.fetch(limit=limit, offset=0, start_cursor=cursor) - entities = list(new_iterator) - self.assertEqual(len(entities), limit) - self.assertEqual(entities[0]["name"], "Sansa") - self.assertEqual(entities[1]["name"], "Jon Snow") - self.assertEqual(entities[2]["name"], "Arya") - - def test_query_paginate_with_start_cursor(self): - page_query = self._base_query() - page_query.order = "appearances" - limit = 3 - offset = 2 - iterator = page_query.fetch(limit=limit, offset=offset) - - # Fetch characters. - page = next(iterator.pages) - entities = list(page) - cursor = iterator.next_page_token - self.assertEqual(len(entities), limit) - - # Use cursor to create a fresh query. 
- fresh_query = self._base_query() - fresh_query.order = "appearances" - - new_entities = list(fresh_query.fetch(start_cursor=cursor, limit=limit)) - characters_remaining = len(self.CHARACTERS) - limit - offset - self.assertEqual(len(new_entities), characters_remaining) - self.assertEqual(new_entities[0]["name"], "Sansa") - self.assertEqual(new_entities[2]["name"], "Arya") - - def test_query_distinct_on(self): - query = self._base_query() - query.distinct_on = ["alive"] - - expected_matches = 2 - # We expect 2, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) - self.assertEqual(len(entities), expected_matches) - - self.assertEqual(entities[0]["name"], "Catelyn") - self.assertEqual(entities[1]["name"], "Arya") - - -class TestDatastoreQueryOffsets(TestDatastore): - TOTAL_OBJECTS = 2500 - NAMESPACE = "LargeCharacterEntity" - KIND = "LargeCharacter" - - @classmethod - def setUpClass(cls): - cls.CLIENT = clone_client(Config.CLIENT) - # Remove the namespace from the cloned client, since these - # query tests rely on the entities to be already stored - # cls.CLIENT.namespace = cls.NAMESPACE - cls.CLIENT.namespace = None - - # Populating the datastore if necessary. - populate_datastore.add_large_character_entities(client=cls.CLIENT) - - @classmethod - def tearDownClass(cls): - # In the emulator, destroy the query entities. - if os.getenv(DATASTORE_DATASET) is not None: - # Use the client for this test instead of the global. - clear_datastore.remove_all_entities(client=cls.CLIENT) - - def _base_query(self): - # Use the client for this test instead of the global. - return self.CLIENT.query(kind=self.KIND, namespace=self.NAMESPACE) - - def _verify(self, limit, offset, expected): - # Query used for all tests - page_query = self._base_query() - page_query.add_filter("family", "=", "Stark") - page_query.add_filter("alive", "=", False) - - iterator = page_query.fetch(limit=limit, offset=offset) - entities = [e for e in iterator] - self.assertEqual(len(entities), expected) - - def test_query_in_bounds_offsets(self): - # Verify that with no offset there are the correct # of results - self._verify(limit=None, offset=None, expected=self.TOTAL_OBJECTS) - - # Verify that with no limit there are results (offset provided)") - self._verify(limit=None, offset=900, expected=self.TOTAL_OBJECTS - 900) - - # Offset beyond items larger Verify 200 items found") - self._verify(limit=200, offset=1100, expected=200) - - def test_query_partially_out_of_bounds_offsets(self): - # Offset within range, expect 50 despite larger limit") - self._verify(limit=100, offset=self.TOTAL_OBJECTS - 50, expected=50) - - def test_query_out_of_bounds_offsets(self): - # Offset beyond items larger Verify no items found") - self._verify(limit=200, offset=self.TOTAL_OBJECTS + 1000, expected=0) - - -class TestDatastoreTransaction(TestDatastore): - def test_transaction_via_with_statement(self): - entity = datastore.Entity(key=Config.CLIENT.key("Company", "Google")) - entity["url"] = u"www.google.com" - - with Config.CLIENT.transaction() as xact: - result = Config.CLIENT.get(entity.key) - if result is None: - xact.put(entity) - self.case_entities_to_delete.append(entity) - - # This will always return after the transaction. 
- retrieved_entity = Config.CLIENT.get(entity.key) - self.case_entities_to_delete.append(retrieved_entity) - self.assertEqual(retrieved_entity, entity) - - def test_transaction_via_explicit_begin_get_commit(self): - # See - # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 - # Note that this example lacks the threading which provokes the race - # condition in that issue: we are basically just exercising the - # "explict" path for using transactions. - BEFORE_1 = 100 - BEFORE_2 = 0 - TRANSFER_AMOUNT = 40 - key1 = Config.CLIENT.key("account", "123") - account1 = datastore.Entity(key=key1) - account1["balance"] = BEFORE_1 - key2 = Config.CLIENT.key("account", "234") - account2 = datastore.Entity(key=key2) - account2["balance"] = BEFORE_2 - Config.CLIENT.put_multi([account1, account2]) - self.case_entities_to_delete.append(account1) - self.case_entities_to_delete.append(account2) - - xact = Config.CLIENT.transaction() - xact.begin() - from_account = Config.CLIENT.get(key1, transaction=xact) - to_account = Config.CLIENT.get(key2, transaction=xact) - from_account["balance"] -= TRANSFER_AMOUNT - to_account["balance"] += TRANSFER_AMOUNT - - xact.put(from_account) - xact.put(to_account) - xact.commit() - - after1 = Config.CLIENT.get(key1) - after2 = Config.CLIENT.get(key2) - self.assertEqual(after1["balance"], BEFORE_1 - TRANSFER_AMOUNT) - self.assertEqual(after2["balance"], BEFORE_2 + TRANSFER_AMOUNT) - - def test_failure_with_contention(self): - contention_prop_name = "baz" - local_client = clone_client(Config.CLIENT) - - # Insert an entity which will be retrieved in a transaction - # and updated outside it with a contentious value. - key = local_client.key("BreakTxn", 1234) - orig_entity = datastore.Entity(key=key) - orig_entity["foo"] = u"bar" - local_client.put(orig_entity) - self.case_entities_to_delete.append(orig_entity) - - with self.assertRaises(Conflict): - with local_client.transaction() as txn: - entity_in_txn = local_client.get(key) - - # Update the original entity outside the transaction. - orig_entity[contention_prop_name] = u"outside" - Config.CLIENT.put(orig_entity) - - # Try to update the entity which we already updated outside the - # transaction. - entity_in_txn[contention_prop_name] = u"inside" - txn.put(entity_in_txn) - - def test_empty_array_put(self): - local_client = clone_client(Config.CLIENT) - - key = local_client.key("EmptyArray", 1234) - local_client = datastore.Client() - entity = datastore.Entity(key=key) - entity["children"] = [] - local_client.put(entity) - retrieved = local_client.get(entity.key) - - self.assertEqual(entity["children"], retrieved["children"]) diff --git a/packages/google-cloud-datastore/tests/system/test_transaction.py b/packages/google-cloud-datastore/tests/system/test_transaction.py new file mode 100644 index 000000000000..d27bc43949d0 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/test_transaction.py @@ -0,0 +1,106 @@ +# Copyright 2011 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
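# NOTE: the ``entities_to_delete`` fixture consumed by the tests below comes
# from the shared system-test conftest, which this patch does not include.
# A plausible minimal shape (names hypothetical) is a list whose contents the
# fixture deletes on teardown:
#
#     @pytest.fixture
#     def entities_to_delete(datastore_client):
#         doomed = []
#         yield doomed
#         for entity in doomed:
#             datastore_client.delete(entity.key)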
+ +import pytest + +from google.cloud import datastore +from google.cloud.exceptions import Conflict + +from . import _helpers + + +def test_transaction_via_with_statement(datastore_client, entities_to_delete): + key = datastore_client.key("Company", "Google") + entity = datastore.Entity(key=key) + entity["url"] = u"www.google.com" + + with datastore_client.transaction() as xact: + result = datastore_client.get(entity.key) + if result is None: + xact.put(entity) + entities_to_delete.append(entity) + + # This will always return after the transaction. + retrieved_entity = datastore_client.get(key) + + entities_to_delete.append(retrieved_entity) + assert retrieved_entity == entity + + +def test_transaction_via_explicit_begin_get_commit( + datastore_client, entities_to_delete, +): + # See + # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 + # Note that this example lacks the threading which provokes the race + # condition in that issue: we are basically just exercising the + # "explict" path for using transactions. + before_1 = 100 + before_2 = 0 + transfer_amount = 40 + + key1 = datastore_client.key("account", "123") + account1 = datastore.Entity(key=key1) + account1["balance"] = before_1 + + key2 = datastore_client.key("account", "234") + account2 = datastore.Entity(key=key2) + account2["balance"] = before_2 + + datastore_client.put_multi([account1, account2]) + entities_to_delete.append(account1) + entities_to_delete.append(account2) + + xact = datastore_client.transaction() + xact.begin() + from_account = datastore_client.get(key1, transaction=xact) + to_account = datastore_client.get(key2, transaction=xact) + from_account["balance"] -= transfer_amount + to_account["balance"] += transfer_amount + + xact.put(from_account) + xact.put(to_account) + xact.commit() + + after1 = datastore_client.get(key1) + after2 = datastore_client.get(key2) + assert after1["balance"] == before_1 - transfer_amount + assert after2["balance"] == before_2 + transfer_amount + + +def test_failure_with_contention(datastore_client, entities_to_delete): + contention_prop_name = "baz" + local_client = _helpers.clone_client(datastore_client) + + # Insert an entity which will be retrieved in a transaction + # and updated outside it with a contentious value. + key = local_client.key("BreakTxn", 1234) + orig_entity = datastore.Entity(key=key) + orig_entity["foo"] = u"bar" + local_client.put(orig_entity) + + entities_to_delete.append(orig_entity) + + with pytest.raises(Conflict): + with local_client.transaction() as txn: + entity_in_txn = local_client.get(key) + + # Update the original entity outside the transaction. + orig_entity[contention_prop_name] = u"outside" + datastore_client.put(orig_entity) + + # Try to update the entity which we already updated outside the + # transaction. 
+ entity_in_txn[contention_prop_name] = u"inside" + txn.put(entity_in_txn) diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 06b2895ac0f2..52f453f65b85 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -54,6 +54,9 @@ {"name": u"Bran", "family": u"Stark", "appearances": 25, "alive": True}, {"name": u"Jon Snow", "family": u"Stark", "appearances": 32, "alive": True}, ) +LARGE_CHARACTER_TOTAL_OBJECTS = 2500 +LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity" +LARGE_CHARACTER_KIND = "LargeCharacter" def print_func(message): @@ -62,15 +65,14 @@ def print_func(message): def add_large_character_entities(client=None): - TOTAL_OBJECTS = 2500 - NAMESPACE = "LargeCharacterEntity" - KIND = "LargeCharacter" MAX_STRING = (string.ascii_lowercase * 58)[:1500] - client.namespace = NAMESPACE + client.namespace = LARGE_CHARACTER_NAMESPACE # Query used for all tests - page_query = client.query(kind=KIND, namespace=NAMESPACE) + page_query = client.query( + kind=LARGE_CHARACTER_KIND, namespace=LARGE_CHARACTER_NAMESPACE + ) def put_objects(count): current = 0 @@ -86,7 +88,7 @@ def put_objects(count): for i in range(start, end): name = "character{0:05d}".format(i) # The Cloud Datastore key for the new entity - task_key = client.key(KIND, name) + task_key = client.key(LARGE_CHARACTER_KIND, name) # Prepares the new entity task = datastore.Entity(key=task_key) @@ -102,16 +104,16 @@ def put_objects(count): current += ENTITIES_TO_BATCH # Ensure we have 1500 entities for tests. If not, clean up type and add - # new entities equal to TOTAL_OBJECTS + # new entities equal to LARGE_CHARACTER_TOTAL_OBJECTS all_entities = [e for e in page_query.fetch()] - if len(all_entities) != TOTAL_OBJECTS: + if len(all_entities) != LARGE_CHARACTER_TOTAL_OBJECTS: # Cleanup Collection if not an exact match while all_entities: entities = all_entities[:500] all_entities = all_entities[500:] client.delete_multi([e.key for e in entities]) # Put objects - put_objects(TOTAL_OBJECTS) + put_objects(LARGE_CHARACTER_TOTAL_OBJECTS) def add_characters(client=None): From 0a21c8670890120d14b7b41c084dbf6cfc1e3edc Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 25 Aug 2021 12:48:11 -0400 Subject: [PATCH 358/611] chore: migrate to main branch (#217) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: migrate to main branch * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update owlbot * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google-cloud-datastore/.kokoro/build.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- .../google-cloud-datastore/CONTRIBUTING.rst | 12 +++--- packages/google-cloud-datastore/docs/conf.py | 10 ++--- packages/google-cloud-datastore/owlbot.py | 41 +++++++++++++++++++ 5 files changed, 54 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index 59bd450ec1d8..3ca0a226f8cc 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox 
python3 -m nox --version # If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then cleanup() { chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh index 311a8d54b9f1..8a324c9c7bc6 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh @@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 5cb48ca79f5d..02db64370059 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-datastore # repository into your local repository. $ git remote add upstream git@github.com:googleapis/python-datastore.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -110,12 +110,12 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date version of ``python-datastore``. The the suggested remote name ``upstream`` should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + the branch should be the main branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -234,7 +234,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-datastore/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-datastore/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud-datastore @@ -259,7 +259,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-datastore/blob/master/noxfile.py +.. _config: https://github.com/googleapis/python-datastore/blob/main/noxfile.py We also explicitly decided to support Python 3 beginning with version 3.6. 
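
For readers unfamiliar with the owlbot machinery this commit leans on:
synthtool's ``s.replace`` applies a regular-expression substitution to every
file matching a glob and returns a count the script can assert on. A minimal
sketch of the call shape, mirroring the substitutions added to ``owlbot.py``
below (the ``import synthtool as s`` line is the usual convention and is
assumed here):

    import synthtool as s

    # s.replace(<glob or list of globs>, <regex>, <replacement>) edits
    # matching files in place; here it rewrites branch references.
    s.replace("docs/conf.py", "master", "main")
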
diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index adfa53c42084..20196a3d1bd0 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -76,8 +76,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The main toctree document. +root_doc = "index" # General information about the project. project = "google-cloud-datastore" @@ -280,7 +280,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-datastore.tex", "google-cloud-datastore Documentation", author, @@ -315,7 +315,7 @@ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "google-cloud-datastore", "google-cloud-datastore Documentation", [author], @@ -334,7 +334,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-datastore", "google-cloud-datastore Documentation", author, diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 0be97a88a169..f4d6ccb81eac 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -152,3 +152,44 @@ def system(session, disable_grpc): ) s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +# ---------------------------------------------------------------------------- +# Main Branch migration +# ---------------------------------------------------------------------------- + +s.replace( + "*.rst", + "master", + "main" +) + +s.replace( + "CONTRIBUTING.rst", + "kubernetes/community/blob/main", + "kubernetes/community/blob/master" +) + +s.replace( + "docs/conf.py", + "master", + "main" +) + +s.replace( + "docs/conf.py", + "main_doc", + "root_doc" +) + +s.replace( + ".kokoro/*", + "master", + "main" +) + +s.replace( + "README.rst", + "google-cloud-python/blob/main/README.rst", + "google-cloud-python/blob/master/README.rst" +) + From 3e776c2e9f7b83ae01ccc602c69b017452f64b01 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Aug 2021 15:30:34 +0000 Subject: [PATCH 359/611] chore(python): disable dependency dashboard (#219) --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/renovate.json | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index a9fcd07cc43b..b75186cf1ba4 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 + digest: sha256:d6761eec279244e57fe9d21f8343381a01d3632c034811a72f68b83119e58c69 diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json index c04895563e69..9fa8816fe873 100644 --- a/packages/google-cloud-datastore/renovate.json +++ b/packages/google-cloud-datastore/renovate.json @@ -1,6 +1,8 @@ { "extends": [ - "config:base", ":preserveSemverRanges" + "config:base", + ":preserveSemverRanges", + ":disableDependencyDashboard" ], "ignorePaths": [".pre-commit-config.yaml"], "pip_requirements": { From 
f58600e79a1897ebac3470306aad0d6dcebc7c43 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 1 Sep 2021 15:43:03 +0000 Subject: [PATCH 360/611] chore(python): group renovate prs (#220) --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/renovate.json | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index b75186cf1ba4..ef3cb34f66fd 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:d6761eec279244e57fe9d21f8343381a01d3632c034811a72f68b83119e58c69 + digest: sha256:1456ea2b3b523ccff5e13030acef56d1de28f21249c62aa0f196265880338fa7 diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json index 9fa8816fe873..c21036d385e5 100644 --- a/packages/google-cloud-datastore/renovate.json +++ b/packages/google-cloud-datastore/renovate.json @@ -1,6 +1,7 @@ { "extends": [ "config:base", + "group:all", ":preserveSemverRanges", ":disableDependencyDashboard" ], From f62a87a56169ad8e8803df1a0671a976ddbd7f14 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 1 Sep 2021 14:29:50 -0400 Subject: [PATCH 361/611] chore(python): rename default branch to main (#221) Source-Link: https://github.com/googleapis/synthtool/commit/5c0fa62eea9c33ebe61e582424b659eb264e1ba4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/CONTRIBUTING.rst | 6 +++--- packages/google-cloud-datastore/docs/conf.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index ef3cb34f66fd..c07f148f0b0b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:1456ea2b3b523ccff5e13030acef56d1de28f21249c62aa0f196265880338fa7 + digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 02db64370059..bd2d8831923b 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -113,9 +113,9 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-datastore``. The the suggested remote name ``upstream`` - should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``main``). + version of ``python-datastore``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). 
- This repository contains configuration for the `pre-commit `__ tool, which automates checking diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 20196a3d1bd0..d51558bef117 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -76,7 +76,7 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The main toctree document. +# The root toctree document. root_doc = "index" # General information about the project. From 306b51aeabd6b3ce0b785f91447581a7e3931d38 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 7 Sep 2021 11:07:45 -0400 Subject: [PATCH 362/611] chore: revert owlbot main branch templates (#222) --- packages/google-cloud-datastore/owlbot.py | 40 ----------------------- 1 file changed, 40 deletions(-) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index f4d6ccb81eac..308ca7d52ef2 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -153,43 +153,3 @@ def system(session, disable_grpc): s.shell.run(["nox", "-s", "blacken"], hide_output=False) -# ---------------------------------------------------------------------------- -# Main Branch migration -# ---------------------------------------------------------------------------- - -s.replace( - "*.rst", - "master", - "main" -) - -s.replace( - "CONTRIBUTING.rst", - "kubernetes/community/blob/main", - "kubernetes/community/blob/master" -) - -s.replace( - "docs/conf.py", - "master", - "main" -) - -s.replace( - "docs/conf.py", - "main_doc", - "root_doc" -) - -s.replace( - ".kokoro/*", - "master", - "main" -) - -s.replace( - "README.rst", - "google-cloud-python/blob/main/README.rst", - "google-cloud-python/blob/master/README.rst" -) - From 9afc8b9dbffea31bc2f398ef0b20cf1494b14d49 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 7 Sep 2021 12:02:15 -0600 Subject: [PATCH 363/611] chore: reference main branch of google-cloud-python (#223) --- packages/google-cloud-datastore/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index bef8a2fcdb74..b142bc22ffa8 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -13,7 +13,7 @@ all other queries. - `Product Documentation`_ .. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg From 47a5f888b18dfb06c0d5beabe2dfa1f6707192d7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 16 Sep 2021 13:18:08 -0400 Subject: [PATCH 364/611] tests: exercise doctests (#224) Closes #209. 
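
The change below drives Sphinx's doctest builder programmatically instead of
rendering HTML. A minimal sketch of that invocation, matching the
``tests/doctests.py`` rework further down (the ``docs`` path is illustrative):

    import os
    from sphinx.application import Sphinx

    docs_dir = "docs"  # directory containing conf.py and contents.rst
    app = Sphinx(
        srcdir=docs_dir,
        confdir=docs_dir,
        outdir=os.path.join(docs_dir, "doctest", "out"),
        doctreedir=os.path.join(docs_dir, "doctest", "doctrees"),
        buildername="doctest",  # execute ``.. doctest::`` blocks
        warningiserror=True,
    )
    app.build()
    assert app.statuscode == 0  # non-zero means at least one doctest failed
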
--- .../google/cloud/datastore/batch.py | 63 ++++++++++++----- .../google/cloud/datastore/client.py | 15 ++-- .../google/cloud/datastore/entity.py | 19 +++++ .../google/cloud/datastore/helpers.py | 14 ++-- .../google/cloud/datastore/key.py | 2 + .../google/cloud/datastore/query.py | 59 ++++++++++++---- .../google/cloud/datastore/transaction.py | 70 +++++++++++-------- packages/google-cloud-datastore/noxfile.py | 17 +++++ packages/google-cloud-datastore/owlbot.py | 43 +++++++++++- .../google-cloud-datastore/tests/doctests.py | 52 ++++++++------ 10 files changed, 257 insertions(+), 97 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index 7b0b47589576..ba8fe6b7551b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -32,31 +32,58 @@ class Batch(object): For example, the following snippet of code will put the two ``save`` operations and the ``delete`` operation into the same mutation, and send - them to the server in a single API request:: + them to the server in a single API request: - >>> from google.cloud import datastore - >>> client = datastore.Client() - >>> batch = client.batch() - >>> batch.begin() - >>> batch.put(entity1) - >>> batch.put(entity2) - >>> batch.delete(key3) - >>> batch.commit() + .. testsetup:: batch + + import uuid + + from google.cloud import datastore + + unique = str(uuid.uuid4())[0:8] + client = datastore.Client(namespace='ns{}'.format(unique)) + + .. doctest:: batch + + >>> entity1 = datastore.Entity(client.key('EntityKind', 1234)) + >>> entity2 = datastore.Entity(client.key('EntityKind', 2345)) + >>> key3 = client.key('EntityKind', 3456) + >>> batch = client.batch() + >>> batch.begin() + >>> batch.put(entity1) + >>> batch.put(entity2) + >>> batch.delete(key3) + >>> batch.commit() You can also use a batch as a context manager, in which case :meth:`commit` will be called automatically if its block exits without - raising an exception:: + raising an exception: + + .. doctest:: batch + + >>> with client.batch() as batch: + ... batch.put(entity1) + ... batch.put(entity2) + ... batch.delete(key3) + + By default, no updates will be sent if the block exits with an error: + + .. doctest:: batch - >>> with batch: - ... batch.put(entity1) - ... batch.put(entity2) - ... batch.delete(key3) + >>> def do_some_work(batch): + ... return + >>> with client.batch() as batch: + ... do_some_work(batch) + ... raise Exception() # rolls back + Traceback (most recent call last): + ... + Exception - By default, no updates will be sent if the block exits with an error:: + .. testcleanup:: txn - >>> with batch: - ... do_some_work(batch) - ... raise Exception() # rolls back + with client.batch() as batch: + batch.delete(client.key('EntityKind', 1234)) + batch.delete(client.key('EntityKind', 2345)) :type client: :class:`google.cloud.datastore.client.Client` :param client: The client used to connect to datastore. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index bacb33535555..b5de0fab600b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -757,36 +757,37 @@ def query(self, **kwargs): .. 
testsetup:: query - import os import uuid from google.cloud import datastore - unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) + unique = str(uuid.uuid4())[0:8] client = datastore.Client(namespace='ns{}'.format(unique)) - query = client.query(kind='_Doctest') - def do_something(entity): + def do_something_with(entity): pass .. doctest:: query >>> query = client.query(kind='MyKind') >>> query.add_filter('property', '=', 'val') + Using the query iterator .. doctest:: query + >>> filters = [('property', '=', 'val')] + >>> query = client.query(kind='MyKind', filters=filters) >>> query_iter = query.fetch() >>> for entity in query_iter: - ... do_something(entity) + ... do_something_with(entity) or manually page through results - .. doctest:: query-page + .. doctest:: query - >>> query_iter = query.fetch(start_cursor=cursor) + >>> query_iter = query.fetch() >>> pages = query_iter.pages >>> >>> first_page = next(pages) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index 7353d210cb8d..b1148865fd4c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -40,8 +40,23 @@ class Entity(dict): Use :meth:`~google.cloud.datastore.client.Client.get` to retrieve an existing entity: + .. testsetup:: entity-ctor + + import uuid + + from google.cloud import datastore + from google.cloud import datastore + + unique = str(uuid.uuid4())[0:8] + client = datastore.Client(namespace='ns{}'.format(unique)) + + entity = datastore.Entity(client.key('EntityKind', 1234)) + entity['property'] = 'value' + client.put(entity) + .. doctest:: entity-ctor + >>> key = client.key('EntityKind', 1234) >>> client.get(key) @@ -53,6 +68,10 @@ class Entity(dict): >>> entity['age'] = 20 >>> entity['name'] = 'JJ' + .. testcleanup:: entity-ctor + + client.delete(entity.key) + However, not all types are allowed as a value for a Google Cloud Datastore entity. The following basic types are supported by the API: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 7222fbdff73f..85dfc240cf19 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -313,10 +313,16 @@ def _pb_attr_value(val): For example: - >>> _pb_attr_value(1234) - ('integer_value', 1234) - >>> _pb_attr_value('my_string') - ('string_value', 'my_string') + .. testsetup:: pb-attr-value + + from google.cloud.datastore.helpers import _pb_attr_value + + .. doctest:: pb-attr-value + + >>> _pb_attr_value(1234) + ('integer_value', 1234) + >>> _pb_attr_value('my_string') + ('string_value', 'my_string') :type val: :class:`datetime.datetime`, :class:`google.cloud.datastore.key.Key`, diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 98502f9c6c61..76f18455c312 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -598,6 +598,8 @@ def _get_flat_path(path_pb): For example + .. 
code:: python + Element { type: "parent" id: 59 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index b4b24ca7b4e8..5e4f49376521 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -215,13 +215,21 @@ def add_filter(self, property_name, operator, value): where property is a property stored on the entity in the datastore and operator is one of ``OPERATORS`` - (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``):: + (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``): - >>> from google.cloud import datastore - >>> client = datastore.Client() - >>> query = client.query(kind='Person') - >>> query = query.add_filter('name', '=', 'James') - >>> query = query.add_filter('age', '>', 50) + .. testsetup:: query-filter + + import uuid + + from google.cloud import datastore + + client = datastore.Client() + + .. doctest:: query-filter + + >>> query = client.query(kind='Person') + >>> query = query.add_filter('name', '=', 'James') + >>> query = query.add_filter('age', '>', 50) :type property_name: str :param property_name: A property name. @@ -349,16 +357,37 @@ def fetch( ): """Execute the Query; return an iterator for the matching entities. - For example:: + For example: + + .. testsetup:: query-fetch + + import uuid + + from google.cloud import datastore + + unique = str(uuid.uuid4())[0:8] + client = datastore.Client(namespace='ns{}'.format(unique)) + + + .. doctest:: query-fetch + + >>> andy = datastore.Entity(client.key('Person', 1234)) + >>> andy['name'] = 'Andy' + >>> sally = datastore.Entity(client.key('Person', 2345)) + >>> sally['name'] = 'Sally' + >>> bobby = datastore.Entity(client.key('Person', 3456)) + >>> bobby['name'] = 'Bobby' + >>> client.put_multi([andy, sally, bobby]) + >>> query = client.query(kind='Person') + >>> result = list(query.add_filter('name', '=', 'Sally').fetch()) + >>> result + [] + + .. testcleanup:: query-fetch - >>> from google.cloud import datastore - >>> client = datastore.Client() - >>> query = client.query(kind='Person') - >>> result = query.add_filter('name', '=', 'Sally').fetch() - >>> list(result) - [, , ...] - >>> list(query.fetch(1)) - [] + client.delete(andy.key) + client.delete(sally.key) + client.delete(bobby.key) :type limit: int :param limit: (Optional) limit passed through to the iterator. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 59d2ad9bf574..5da64198dc5a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -41,15 +41,26 @@ class Transaction(Batch): operations (either ``insert`` or ``upsert``) into the same mutation, and execute those within a transaction: - .. doctest:: txn-put-multi + .. testsetup:: txn + import uuid + + from google.cloud import datastore + + unique = str(uuid.uuid4())[0:8] + client = datastore.Client(namespace='ns{}'.format(unique)) + + .. doctest:: txn + + >>> entity1 = datastore.Entity(client.key('EntityKind', 1234)) + >>> entity2 = datastore.Entity(client.key('EntityKind', 2345)) >>> with client.transaction(): ... client.put_multi([entity1, entity2]) Because it derives from :class:`~google.cloud.datastore.batch.Batch`, :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods: - .. doctest:: txn-api + .. 
doctest:: txn >>> with client.transaction() as xact: ... xact.put(entity1) @@ -58,24 +69,12 @@ class Transaction(Batch): By default, the transaction is rolled back if the transaction block exits with an error: - .. testsetup:: txn-error - - import os - import uuid - - from google.cloud import datastore - - unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) - client = datastore.Client(namespace='ns{}'.format(unique)) - - def do_some_work(): - return - - class SomeException(Exception): - pass - - .. doctest:: txn-error + .. doctest:: txn + >>> def do_some_work(): + ... return + >>> class SomeException(Exception): + ... pass >>> with client.transaction(): ... do_some_work() ... raise SomeException # rolls back @@ -92,42 +91,51 @@ class SomeException(Exception): entities will not be available at save time! That means, if you try: - .. doctest:: txn-entity-key + .. doctest:: txn >>> with client.transaction(): - ... entity = Entity(key=client.key('Thing')) - ... client.put(entity) + ... thing1 = datastore.Entity(key=client.key('Thing')) + ... client.put(thing1) - ``entity`` won't have a complete key until the transaction is + ``thing1`` won't have a complete key until the transaction is committed. Once you exit the transaction (or call :meth:`commit`), the automatically generated ID will be assigned to the entity: - .. doctest:: txn-entity-key-after + .. doctest:: txn >>> with client.transaction(): - ... entity = Entity(key=client.key('Thing')) - ... client.put(entity) - ... print(entity.key.is_partial) # There is no ID on this key. + ... thing2 = datastore.Entity(key=client.key('Thing')) + ... client.put(thing2) + ... print(thing2.key.is_partial) # There is no ID on this key. ... True - >>> print(entity.key.is_partial) # There *is* an ID. + >>> print(thing2.key.is_partial) # There *is* an ID. False If you don't want to use the context manager you can initialize a transaction manually: - .. doctest:: txn-manual + .. doctest:: txn >>> transaction = client.transaction() >>> transaction.begin() >>> - >>> entity = Entity(key=client.key('Thing')) - >>> transaction.put(entity) + >>> thing3 = datastore.Entity(key=client.key('Thing')) + >>> transaction.put(thing3) >>> >>> transaction.commit() + .. testcleanup:: txn + + with client.batch() as batch: + batch.delete(client.key('EntityKind', 1234)) + batch.delete(client.key('EntityKind', 2345)) + batch.delete(thing1.key) + batch.delete(thing2.key) + batch.delete(thing3.key) + :type client: :class:`google.cloud.datastore.client.Client` :param client: the client used to connect to datastore. diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index e7f995cf9a65..712619e982bf 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -42,6 +42,7 @@ "lint_setup_py", "blacken", "docs", + "doctests", ] # Error if a python version is missing @@ -209,6 +210,22 @@ def docs(session): ) +@nox.session(python="3.6") +def doctests(session): + # Doctests run against Python 3.6 only. + # It is difficult to make doctests run against both Python 2 and Python 3 + # because they test string output equivalence, which is difficult to + # make match (e.g. unicode literals starting with "u"). + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("mock", "pytest", "sphinx", "google-cloud-testutils") + session.install("-e", ".") + + # Run py.test against the system tests. 
+ session.run("py.test", "tests/doctests.py") + + @nox.session(python=DEFAULT_PYTHON_VERSION) def docfx(session): """Build the docfx yaml files for this library.""" diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 308ca7d52ef2..17a80da5155a 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -109,6 +109,48 @@ def system(session, disable_grpc): """, ) +# Add nox session to exercise doctests +assert 1 == s.replace( + "noxfile.py", + r"""\ + "blacken", + "docs", +""", + """\ + "blacken", + "docs", + "doctests", +""", +) + +assert 1 == s.replace( + "noxfile.py", + r"""\ +@nox.session\(python=DEFAULT_PYTHON_VERSION\) +def docfx\(session\): +""", + """\ +@nox.session(python="3.6") +def doctests(session): + # Doctests run against Python 3.6 only. + # It is difficult to make doctests run against both Python 2 and Python 3 + # because they test string output equivalence, which is difficult to + # make match (e.g. unicode literals starting with "u"). + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("mock", "pytest", "sphinx", "google-cloud-testutils") + session.install("-e", ".") + + # Run py.test against the system tests. + session.run("py.test", "tests/doctests.py") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): +""", +) + # Add documentation about creating indexes and populating data for system # tests. assert 1 == s.replace( @@ -152,4 +194,3 @@ def system(session, disable_grpc): ) s.shell.run(["nox", "-s", "blacken"], hide_output=False) - diff --git a/packages/google-cloud-datastore/tests/doctests.py b/packages/google-cloud-datastore/tests/doctests.py index cc8d6a3a6378..32090561bf9b 100644 --- a/packages/google-cloud-datastore/tests/doctests.py +++ b/packages/google-cloud-datastore/tests/doctests.py @@ -21,6 +21,8 @@ SPHINX_CONF = """\ +root_doc = "contents" + extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', @@ -33,6 +35,7 @@ .. 
automodule:: google.cloud.%s :members: + :private-members: """ @@ -56,9 +59,7 @@ def _add_section(index, mod_name, file_obj): content = SPHINX_SECTION_TEMPLATE % (index, mod_part) file_obj.write(content) - def _make_temp_docs(self): - docs_dir = tempfile.mkdtemp(prefix="datastore-") - + def _make_temp_docs(self, docs_dir): conf_file = os.path.join(docs_dir, "conf.py") with open(conf_file, "w") as file_obj: @@ -66,29 +67,38 @@ def _make_temp_docs(self): index_file = os.path.join(docs_dir, "contents.rst") datastore_modules = self._submodules() + with open(index_file, "w") as file_obj: self._add_section(0, "__init__", file_obj) for index, datastore_module in enumerate(datastore_modules): self._add_section(index + 1, datastore_module, file_obj) - return docs_dir - def test_it(self): from sphinx import application - docs_dir = self._make_temp_docs() - outdir = os.path.join(docs_dir, "doctest", "out") - doctreedir = os.path.join(docs_dir, "doctest", "doctrees") - - app = application.Sphinx( - srcdir=docs_dir, - confdir=docs_dir, - outdir=outdir, - doctreedir=doctreedir, - buildername="doctest", - warningiserror=True, - parallel=1, - ) - - app.build() - self.assertEqual(app.statuscode, 0) + with tempfile.TemporaryDirectory(prefix="datastore-") as docs_dir: + self._make_temp_docs(docs_dir) + outdir = os.path.join(docs_dir, "doctest", "out") + doctreedir = os.path.join(docs_dir, "doctest", "doctrees") + + app = application.Sphinx( + srcdir=docs_dir, + confdir=docs_dir, + outdir=outdir, + doctreedir=doctreedir, + buildername="doctest", + warningiserror=True, + parallel=1, + verbosity=1, + ) + + try: + app.build() + except Exception: + outfile = os.path.join(outdir, "output.txt") + with open(outfile, "r") as file_obj: + output = file_obj.read() + print(f"\n\nDoctest output\n--------------\n\n{output}") + raise + else: + self.assertEqual(app.statuscode, 0) From 0fe8f07aed6c852b8d82a2017544a54779bfb020 Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Tue, 21 Sep 2021 11:38:35 -0700 Subject: [PATCH 365/611] chore: relocate owl bot post processor (#226) --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.github/.OwlBot.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index c07f148f0b0b..2567653c000d 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a diff --git a/packages/google-cloud-datastore/.github/.OwlBot.yaml b/packages/google-cloud-datastore/.github/.OwlBot.yaml index 83c15486953f..41eaf2b934f9 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.yaml @@ -13,7 +13,7 @@ # limitations under the License. 
docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest deep-remove-regex: - /owl-bot-staging From a459743df6d6d6be61b31f33836f756606874835 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 27 Sep 2021 14:45:32 -0400 Subject: [PATCH 366/611] tests: harden tests of 'Query.fetch' against 503 (#229) Wrap twice, once to retry 503 errors on initial API call, and again to retry the whole dance if a 503 is raised during iteration of the results. Closes #228. --- .../tests/system/test_query.py | 38 ++++++++++++++----- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index 53ae3e4fb3d0..c5921bc910f7 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -14,11 +14,31 @@ import pytest +from google.api_core import exceptions +from test_utils.retry import RetryErrors + from .utils import clear_datastore from .utils import populate_datastore from . import _helpers +retry_503 = RetryErrors(exceptions.ServiceUnavailable) + + +def _make_iterator(query, **kw): + # Do retry for errors raised during initial API call + return retry_503(query.fetch)(**kw) + + +def _pull_iterator(query, **kw): + return list(_make_iterator(query, **kw)) + + +def _do_fetch(query, **kw): + # Do retry for errors raised during iteration + return retry_503(_pull_iterator)(query, **kw) + + @pytest.fixture(scope="session") def query_client(datastore_client): return _helpers.clone_client(datastore_client, namespace=None) @@ -54,7 +74,7 @@ def test_query_w_ancestor(ancestor_query): expected_matches = 8 # We expect 8, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) + entities = _do_fetch(query, limit=expected_matches + 1) assert len(entities) == expected_matches @@ -74,7 +94,7 @@ def test_query_w_limit_paging(ancestor_query): assert cursor is not None # Fetch remaining characters. - new_character_entities = list(query.fetch(start_cursor=cursor)) + new_character_entities = _do_fetch(query, start_cursor=cursor) characters_remaining = len(populate_datastore.CHARACTERS) - limit assert len(new_character_entities) == characters_remaining @@ -85,7 +105,7 @@ def test_query_w_simple_filter(ancestor_query): expected_matches = 6 # We expect 6, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) + entities = _do_fetch(query, limit=expected_matches + 1) assert len(entities) == expected_matches @@ -97,7 +117,7 @@ def test_query_w_multiple_filters(ancestor_query): expected_matches = 4 # We expect 4, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) + entities = _do_fetch(query, limit=expected_matches + 1) assert len(entities) == expected_matches @@ -110,7 +130,7 @@ def test_query_key_filter(query_client, ancestor_query): expected_matches = 1 # We expect 1, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) + entities = _do_fetch(query, limit=expected_matches + 1) assert len(entities) == expected_matches @@ -121,7 +141,7 @@ def test_query_w_order(ancestor_query): expected_matches = 8 # We expect 8, but allow the query to get 1 extra. 
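# A distilled sketch of the two-layer retry introduced above: ``RetryErrors``
# re-invokes the wrapped callable whenever the named exception is raised, so
# the inner wrapper guards the initial fetch() RPC while the outer wrapper
# re-runs the whole fetch-and-iterate dance if a 503 surfaces mid-stream:
#
#     retry_503 = RetryErrors(exceptions.ServiceUnavailable)
#
#     def pull(query, **kw):
#         return list(retry_503(query.fetch)(**kw))   # inner: guard the RPC
#
#     entities = retry_503(pull)(query, limit=10)     # outer: guard iteration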
- entities = list(query.fetch(limit=expected_matches + 1)) + entities = _do_fetch(query, limit=expected_matches + 1) assert len(entities) == expected_matches @@ -202,10 +222,10 @@ def test_query_w_offset_w_timestamp_keys(query_client): max_offset = max_all - offset query = query_client.query(kind="timestamp_key") - all_w_limit = list(query.fetch(limit=max_all)) + all_w_limit = _do_fetch(query, limit=max_all) assert len(all_w_limit) == max_all - offset_w_limit = list(query.fetch(offset=offset, limit=max_offset)) + offset_w_limit = _do_fetch(query, offset=offset, limit=max_offset) assert offset_w_limit == all_w_limit[offset:] @@ -271,7 +291,7 @@ def test_query_distinct_on(ancestor_query): expected_matches = 2 # We expect 2, but allow the query to get 1 extra. - entities = list(query.fetch(limit=expected_matches + 1)) + entities = _do_fetch(query, limit=expected_matches + 1) assert len(entities) == expected_matches assert entities[0]["name"] == "Catelyn" From a2739d850ee2a5d89a78ab59ad1c5b78eaeed1ba Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 1 Oct 2021 12:28:34 -0400 Subject: [PATCH 367/611] chore: exclude 'CODEOWNERS' from templated files (#230) See: https://github.com/googleapis/synthtool/pull/1201 --- packages/google-cloud-datastore/owlbot.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 17a80da5155a..e590b68d13a3 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -55,7 +55,10 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(microgenerator=True, split_system_tests=True,) -s.move(templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc"]) +s.move( + templated_files, + excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS"], +) python.py_samples(skip_readmes=True) From c22b130aa22284d069169ad18fc8083605c99fe8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 11:17:43 -0600 Subject: [PATCH 368/611] build: use trampoline_v2 for python samples and allow custom dockerfile (#232) Source-Link: https://github.com/googleapis/synthtool/commit/a7ed11ec0863c422ba2e73aafa75eab22c32b33d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/lint/common.cfg | 2 +- .../.kokoro/samples/python3.6/common.cfg | 2 +- .../.kokoro/samples/python3.6/periodic.cfg | 2 +- .../.kokoro/samples/python3.7/common.cfg | 2 +- .../.kokoro/samples/python3.7/periodic.cfg | 2 +- .../.kokoro/samples/python3.8/common.cfg | 2 +- .../.kokoro/samples/python3.8/periodic.cfg | 2 +- .../.kokoro/samples/python3.9/common.cfg | 2 +- .../.kokoro/samples/python3.9/periodic.cfg | 2 +- .../.kokoro/test-samples-against-head.sh | 2 -- .../.kokoro/test-samples.sh | 2 -- packages/google-cloud-datastore/.trampolinerc | 17 ++++++++++++++--- 13 files changed, 24 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 2567653c000d..ee94722ab57b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a + digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg index 33f7432a1209..80001a39bf25 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg index a65c0f39be86..c726211bf114 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg index 18251bfc8de9..a46730a6bbd1 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg index 77f73452273b..d13fb561e938 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg index 6e6deb8407f0..4fcda9f0720c 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh index 7503e7624993..ba3a707b040c 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-datastore - exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh index b3014bb9cdf4..11c042d342d7 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-datastore - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. diff --git a/packages/google-cloud-datastore/.trampolinerc b/packages/google-cloud-datastore/.trampolinerc index 383b6ec89fbc..0eee72ab62aa 100644 --- a/packages/google-cloud-datastore/.trampolinerc +++ b/packages/google-cloud-datastore/.trampolinerc @@ -16,15 +16,26 @@ # Add required env vars here. required_envvars+=( - "STAGING_BUCKET" - "V2_STAGING_BUCKET" ) # Add env vars which are passed down into the container here. pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" ) # Prevent unintentional override on the default image. 
From d26f2a654d1baed022a40006bf009cb197bd7ea1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 7 Oct 2021 18:14:48 +0000 Subject: [PATCH 369/611] chore(python): fix formatting issue in noxfile.py.j2 (#234) --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/CONTRIBUTING.rst | 6 ++++-- packages/google-cloud-datastore/noxfile.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index ee94722ab57b..76d0baa0a49d 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc + digest: sha256:4370ced27a324687ede5da07132dcdc5381993502a5e8a3e31e16dc631d026f0 diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index bd2d8831923b..37893012515c 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.9 -- -k + $ nox -s unit-3.10 -- -k .. note:: @@ -250,11 +250,13 @@ We support: - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ +- `Python 3.10`_ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 712619e982bf..1ca31940f60d 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -29,7 +29,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() From e41f38352febeb2e44f146fdbfaaf5c8649e7b2d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 8 Oct 2021 12:10:15 -0400 Subject: [PATCH 370/611] feat: add support for Python 3.10 (#233) --- packages/google-cloud-datastore/owlbot.py | 6 +++++- packages/google-cloud-datastore/setup.py | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index e590b68d13a3..abbbb99aea11 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -54,7 +54,11 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(microgenerator=True, split_system_tests=True,) +templated_files = common.py_library( + microgenerator=True, + split_system_tests=True, + unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], +) s.move( templated_files, excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS"], diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 19fe77117875..6550cea3eaf9 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -89,6 +89,7 @@ "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Operating System :: OS Independent", "Topic :: Internet", "Topic :: Software Development :: Libraries :: Python Modules", From c4db9cf5b630c7b0f1651515c884bb4928369e92 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 8 Oct 2021 16:18:14 +0000 Subject: [PATCH 371/611] chore: release 2.2.0 (#238) :robot: I have created a release \*beep\* \*boop\* --- ## [2.2.0](https://www.github.com/googleapis/python-datastore/compare/v2.1.6...v2.2.0) (2021-10-08) ### Features * add support for Python 3.10 ([#233](https://www.github.com/googleapis/python-datastore/issues/233)) ([f524c40](https://www.github.com/googleapis/python-datastore/commit/f524c40e8251c2b716ea87cd512404f0d6f1b019)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
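The diffstat that follows shows the release touches exactly two files: the CHANGELOG and `version.py`, the single file release-please rewrites to bump the version. Because the package re-exports `__version__` from `version.py`, the installed release can be confirmed at runtime; a trivial check, assuming the package is installed:

import google.cloud.datastore

# Reads google/cloud/datastore/version.py, the file release-please bumps.
print(google.cloud.datastore.__version__)  # "2.2.0" once this release is installed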
--- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 91b6b8520416..2b81e22808ed 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.2.0](https://www.github.com/googleapis/python-datastore/compare/v2.1.6...v2.2.0) (2021-10-08) + + +### Features + +* add support for Python 3.10 ([#233](https://www.github.com/googleapis/python-datastore/issues/233)) ([f524c40](https://www.github.com/googleapis/python-datastore/commit/f524c40e8251c2b716ea87cd512404f0d6f1b019)) + ### [2.1.6](https://www.github.com/googleapis/python-datastore/compare/v2.1.5...v2.1.6) (2021-07-26) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 5c544daee541..bd0f8e5c7d25 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.1.6" +__version__ = "2.2.0" From a4c1d16469184ff6e3c0be307ef9289ceebd4192 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Oct 2021 17:50:18 +0000 Subject: [PATCH 372/611] chore(python): Add kokoro configs for python 3.10 samples testing (#237) --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.10/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.10/continuous.cfg | 6 +++ .../samples/python3.10/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.10/periodic.cfg | 6 +++ .../.kokoro/samples/python3.10/presubmit.cfg | 6 +++ 6 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 76d0baa0a49d..7d98291cc35f 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4370ced27a324687ede5da07132dcdc5381993502a5e8a3e31e16dc631d026f0 + digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg new file mode 100644 index 000000000000..237cfb14398b --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# 
Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.10" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-310" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 000000000000..714045a75ed7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file From b1254efd38944fbc680eb5124ac79fee7eaee13e Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 13 Oct 2021 01:32:22 -0700 Subject: [PATCH 373/611] chore: add py.typed file for PEP 561 compatibility (#241) * chore: add py.typed file for PEP 561 compatibility * Update py.typed --- packages/google-cloud-datastore/google/cloud/datastore/py.typed | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/py.typed diff --git a/packages/google-cloud-datastore/google/cloud/datastore/py.typed b/packages/google-cloud-datastore/google/cloud/datastore/py.typed new file mode 100644 index 000000000000..341f38ba69d0 --- /dev/null +++ 
b/packages/google-cloud-datastore/google/cloud/datastore/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datastore package uses inline types. From bebb4647492ae5692782c8f88ca729df1c0751a8 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 14 Oct 2021 14:40:02 -0700 Subject: [PATCH 374/611] fix: improve type hints, mypy check (#242) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: ensure mypy passes * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: flake8 doesn't like lambdas * test: add mypy test scenarios * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: scope mypy to google.cloud.datastore for now * chore: remove api_core ignores, quiet a mypy error by import rename * fix: drop unneeded extra function Also, have shim 'make_datastore_api' raise an exception, as it only gets called if gRPC is disabled, or not installed. Co-authored-by: Owl Bot Co-authored-by: Tres Seaver --- .../google/cloud/datastore/_http.py | 4 +-- .../google/cloud/datastore/client.py | 17 +++++----- packages/google-cloud-datastore/mypy.ini | 7 ++++ packages/google-cloud-datastore/noxfile.py | 10 ++++++ packages/google-cloud-datastore/owlbot.py | 33 +++++++++++++++++++ 5 files changed, 61 insertions(+), 10 deletions(-) create mode 100644 packages/google-cloud-datastore/mypy.ini diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 9e13567b509b..9ea5aac88294 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -14,10 +14,10 @@ """Connections to Google Cloud Datastore API servers.""" -from google.rpc import status_pb2 +from google.rpc import status_pb2 # type: ignore from google.cloud import _http as connection_module -from google.cloud import exceptions +from google.cloud import exceptions # type: ignore from google.cloud.datastore_v1.types import datastore as _datastore_pb2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index b5de0fab600b..4793e059981a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -17,10 +17,10 @@ import warnings import google.api_core.client_options -from google.auth.credentials import AnonymousCredentials -from google.cloud._helpers import _LocalStack -from google.cloud._helpers import _determine_default_project as _base_default_project -from google.cloud.client import ClientWithProject +from google.auth.credentials import AnonymousCredentials # type: ignore +from google.cloud._helpers import _LocalStack # type: ignore +from google.cloud._helpers import _determine_default_project as _base_default_project # type: ignore +from google.cloud.client import ClientWithProject # type: ignore from google.cloud.datastore.version import __version__ from google.cloud.datastore import helpers from google.cloud.datastore._http import HTTPDatastoreAPI @@ -32,13 +32,14 @@ try: from google.cloud.datastore._gapic import make_datastore_api - except ImportError: # pragma: NO COVER - from google.api_core import client_info + from google.api_core import client_info as api_core_client_info + + def
make_datastore_api(client): + raise RuntimeError("No gRPC available") - make_datastore_api = None _HAVE_GRPC = False - _CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__) + _CLIENT_INFO = api_core_client_info.ClientInfo(client_library_version=__version__) else: from google.api_core.gapic_v1 import client_info diff --git a/packages/google-cloud-datastore/mypy.ini b/packages/google-cloud-datastore/mypy.ini new file mode 100644 index 000000000000..5663b40df934 --- /dev/null +++ b/packages/google-cloud-datastore/mypy.ini @@ -0,0 +1,7 @@ +[mypy] +python_version = 3.6 +namespace_packages = True +ignore_missing_imports = True + +[mypy-google.protobuf] +ignore_missing_imports = True diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 1ca31940f60d..4eeac549d24f 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -37,6 +37,7 @@ nox.options.sessions = [ "unit", "system", + "mypy", "cover", "lint", "lint_setup_py", @@ -72,6 +73,15 @@ def blacken(session): ) +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Verify type hints are mypy compatible.""" + session.install("-e", ".") + session.install("mypy") + # TODO: also verify types on tests, all of google package + session.run("mypy", "-p", "google.cloud.datastore", "--no-incremental") + + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index abbbb99aea11..0ad059b78c85 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -200,4 +200,37 @@ def docfx(session): """, ) +# add type checker nox session +s.replace("noxfile.py", + """nox.options.sessions = \[ + "unit", + "system",""", + """nox.options.sessions = [ + "unit", + "system", + "mypy",""", +) + + +s.replace( + "noxfile.py", + """\ +@nox.session\(python=DEFAULT_PYTHON_VERSION\) +def lint_setup_py\(session\): +""", + '''\ +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Verify type hints are mypy compatible.""" + session.install("-e", ".") + session.install("mypy") + # TODO: also verify types on tests, all of google package + session.run("mypy", "-p", "google.cloud.datastore", "--no-incremental") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): +''', +) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 7a506678f45d89cad322d95f61fb16f600d17cd3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 18 Oct 2021 15:10:23 -0400 Subject: [PATCH 375/611] feat: add 'Client.entity' helper (#239) Proxy for 'google.cloud.datastore.entity.Entity'. 
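A short usage sketch of the new helper (the kind, id, and property names are illustrative): `client.entity()` simply constructs an `Entity`, mirroring the existing `client.key()` and `client.batch()` proxies, so callers no longer need a separate `Entity` import.

from google.cloud import datastore

client = datastore.Client()  # assumes default credentials and project

key = client.key("Task", 1234)
task = client.entity(key=key, exclude_from_indexes=("description",))
task["description"] = "A long text value kept out of the built-in indexes."
client.put(task)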
--- .../google/cloud/datastore/client.py | 4 ++++ .../tests/unit/test_client.py | 24 +++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 4793e059981a..207759cc2bf6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -738,6 +738,10 @@ def key(self, *path_args, **kwargs): kwargs["namespace"] = self.namespace return Key(*path_args, **kwargs) + def entity(self, key=None, exclude_from_indexes=()): + """Proxy to :class:`google.cloud.datastore.entity.Entity`.""" + return Entity(key=key, exclude_from_indexes=exclude_from_indexes) + def batch(self): """Proxy to :class:`google.cloud.datastore.batch.Batch`.""" return Batch(self) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 5127fd60730b..f4c27cf421fd 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -1335,6 +1335,30 @@ def test_key_w_namespace_collision(self): kind, id_, project=self.PROJECT, namespace=namespace2 ) + def test_entity_w_defaults(self): + creds = _make_credentials() + client = self._make_one(credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) + with patch as mock_klass: + entity = client.entity() + self.assertIs(entity, mock_klass.return_value) + mock_klass.assert_called_once_with(key=None, exclude_from_indexes=()) + + def test_entity_w_explicit(self): + key = mock.Mock(spec=[]) + exclude_from_indexes = ["foo", "bar"] + creds = _make_credentials() + client = self._make_one(credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) + with patch as mock_klass: + entity = client.entity(key, exclude_from_indexes) + self.assertIs(entity, mock_klass.return_value) + mock_klass.assert_called_once_with( + key=key, exclude_from_indexes=exclude_from_indexes + ) + def test_batch(self): creds = _make_credentials() client = self._make_one(credentials=creds) From 042574f96fa3cdd4a8ac0a5c8bc51f774b189138 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 18 Oct 2021 15:19:43 -0400 Subject: [PATCH 376/611] chore: release 2.3.0 (#244) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 2b81e22808ed..b24a7b8a6deb 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.3.0](https://www.github.com/googleapis/python-datastore/compare/v2.2.0...v2.3.0) (2021-10-18) + + +### Features + +* add 'Client.entity' helper ([#239](https://www.github.com/googleapis/python-datastore/issues/239)) ([49d48f1](https://www.github.com/googleapis/python-datastore/commit/49d48f17b0c311b859b62a8bd0af8ebf8f7d5717)) + + +### Bug Fixes + +* improve type hints, mypy check ([#242](https://www.github.com/googleapis/python-datastore/issues/242)) 
([6398bbc](https://www.github.com/googleapis/python-datastore/commit/6398bbcaf8a9d845a4b3d06cfc673a633851f48b)) + ## [2.2.0](https://www.github.com/googleapis/python-datastore/compare/v2.1.6...v2.2.0) (2021-10-08) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index bd0f8e5c7d25..999199f5a138 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.2.0" +__version__ = "2.3.0" From df5bcfa78bd0b73dbffd58533759e65b8e755df3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 22 Oct 2021 14:20:33 -0400 Subject: [PATCH 377/611] tests: refactor unittests using pytest idioms (#235) --- .../tests/unit/test__gapic.py | 131 +- .../tests/unit/test__http.py | 1348 ++++---- .../tests/unit/test_batch.py | 864 +++--- .../tests/unit/test_client.py | 2697 +++++++++-------- .../tests/unit/test_entity.py | 412 +-- .../tests/unit/test_helpers.py | 2063 +++++++------ .../tests/unit/test_key.py | 1419 ++++----- .../tests/unit/test_query.py | 1489 ++++----- .../tests/unit/test_transaction.py | 602 ++-- 9 files changed, 5639 insertions(+), 5386 deletions(-) diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py index 4543dba9d07f..e7f0b6903733 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gapic.py +++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py @@ -12,86 +12,81 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest - import mock +import pytest from google.cloud.datastore.client import _HAVE_GRPC -@unittest.skipUnless(_HAVE_GRPC, "No gRPC") -class Test_make_datastore_api(unittest.TestCase): - def _call_fut(self, client): - from google.cloud.datastore._gapic import make_datastore_api - - return make_datastore_api(client) - - @mock.patch( - "google.cloud.datastore_v1.services.datastore.client.DatastoreClient", - return_value=mock.sentinel.ds_client, +@pytest.mark.skipif(not _HAVE_GRPC, reason="No gRPC") +@mock.patch( + "google.cloud.datastore_v1.services.datastore.client.DatastoreClient", + return_value=mock.sentinel.ds_client, +) +@mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport", + return_value=mock.sentinel.transport, +) +@mock.patch( + "google.cloud.datastore._gapic.make_secure_channel", + return_value=mock.sentinel.channel, +) +def test_live_api(make_chan, mock_transport, mock_klass): + from google.cloud._http import DEFAULT_USER_AGENT + from google.cloud.datastore._gapic import make_datastore_api + + base_url = "https://datastore.googleapis.com:443" + client = mock.Mock( + _base_url=base_url, + _credentials=mock.sentinel.credentials, + _client_info=mock.sentinel.client_info, + spec=["_base_url", "_credentials", "_client_info"], ) - @mock.patch( - "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport", - return_value=mock.sentinel.transport, - ) - @mock.patch( - "google.cloud.datastore._gapic.make_secure_channel", - return_value=mock.sentinel.channel, - ) - def test_live_api(self, make_chan, mock_transport, mock_klass): - from google.cloud._http import DEFAULT_USER_AGENT + ds_api = make_datastore_api(client) + assert ds_api is mock.sentinel.ds_client - base_url = "https://datastore.googleapis.com:443" - client = mock.Mock( - _base_url=base_url, - _credentials=mock.sentinel.credentials, - _client_info=mock.sentinel.client_info, - spec=["_base_url", "_credentials", "_client_info"], - ) - ds_api = self._call_fut(client) - self.assertIs(ds_api, mock.sentinel.ds_client) + mock_transport.assert_called_once_with(channel=mock.sentinel.channel) - mock_transport.assert_called_once_with(channel=mock.sentinel.channel) + make_chan.assert_called_once_with( + mock.sentinel.credentials, DEFAULT_USER_AGENT, "datastore.googleapis.com:443", + ) - make_chan.assert_called_once_with( - mock.sentinel.credentials, - DEFAULT_USER_AGENT, - "datastore.googleapis.com:443", - ) + mock_klass.assert_called_once_with( + transport=mock.sentinel.transport, client_info=mock.sentinel.client_info + ) - mock_klass.assert_called_once_with( - transport=mock.sentinel.transport, client_info=mock.sentinel.client_info - ) - @mock.patch( - "google.cloud.datastore_v1.services.datastore.client.DatastoreClient", - return_value=mock.sentinel.ds_client, - ) - @mock.patch( - "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport", - return_value=mock.sentinel.transport, - ) - @mock.patch( - "google.cloud.datastore._gapic.insecure_channel", - return_value=mock.sentinel.channel, +@pytest.mark.skipif(not _HAVE_GRPC, reason="No gRPC") +@mock.patch( + "google.cloud.datastore_v1.services.datastore.client.DatastoreClient", + return_value=mock.sentinel.ds_client, +) +@mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport", + return_value=mock.sentinel.transport, +) +@mock.patch( + "google.cloud.datastore._gapic.insecure_channel", + return_value=mock.sentinel.channel, +) +def 
test_emulator(make_chan, mock_transport, mock_klass): + from google.cloud.datastore._gapic import make_datastore_api + + host = "localhost:8901" + base_url = "http://" + host + client = mock.Mock( + _base_url=base_url, + _credentials=mock.sentinel.credentials, + _client_info=mock.sentinel.client_info, + spec=["_base_url", "_credentials", "_client_info"], ) - def test_emulator(self, make_chan, mock_transport, mock_klass): + ds_api = make_datastore_api(client) + assert ds_api is mock.sentinel.ds_client - host = "localhost:8901" - base_url = "http://" + host - client = mock.Mock( - _base_url=base_url, - _credentials=mock.sentinel.credentials, - _client_info=mock.sentinel.client_info, - spec=["_base_url", "_credentials", "_client_info"], - ) - ds_api = self._call_fut(client) - self.assertIs(ds_api, mock.sentinel.ds_client) + mock_transport.assert_called_once_with(channel=mock.sentinel.channel) - mock_transport.assert_called_once_with(channel=mock.sentinel.channel) + make_chan.assert_called_once_with(host) - make_chan.assert_called_once_with(host) - - mock_klass.assert_called_once_with( - transport=mock.sentinel.transport, client_info=mock.sentinel.client_info - ) + mock_klass.assert_called_once_with( + transport=mock.sentinel.transport, client_info=mock.sentinel.client_info + ) diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index 2e8da9e940e6..67f28ffe0a75 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -12,830 +12,848 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest +import http.client import mock -from http import client - +import pytest import requests -class Test__make_retry_timeout_kwargs(unittest.TestCase): - @staticmethod - def _call_fut(retry, timeout): - from google.cloud.datastore._http import _make_retry_timeout_kwargs +def test__make_retry_timeout_kwargs_w_empty(): + from google.cloud.datastore._http import _make_retry_timeout_kwargs - return _make_retry_timeout_kwargs(retry, timeout) + expected = {} + assert _make_retry_timeout_kwargs(None, None) == expected - def test_empty(self): - expected = {} - self.assertEqual(self._call_fut(None, None), expected) - def test_w_retry(self): - retry = object() - expected = {"retry": retry} - self.assertEqual(self._call_fut(retry, None), expected) +def test__make_retry_timeout_kwargs_w_retry(): + from google.cloud.datastore._http import _make_retry_timeout_kwargs - def test_w_timeout(self): - timeout = 5.0 - expected = {"timeout": timeout} - self.assertEqual(self._call_fut(None, timeout), expected) + retry = object() + expected = {"retry": retry} + assert _make_retry_timeout_kwargs(retry, None) == expected - def test_w_retry_w_timeout(self): - retry = object() - timeout = 5.0 - expected = {"retry": retry, "timeout": timeout} - self.assertEqual(self._call_fut(retry, timeout), expected) +def test__make_retry_timeout_kwargs_w_timeout(): + from google.cloud.datastore._http import _make_retry_timeout_kwargs -class Foo: - def __init__(self, bar=None, baz=None): - self.bar = bar - self.baz = baz + timeout = 5.0 + expected = {"timeout": timeout} + assert _make_retry_timeout_kwargs(None, timeout) == expected -class Test__make_request_pb(unittest.TestCase): - @staticmethod - def _call_fut(request, request_pb_type): - from google.cloud.datastore._http import _make_request_pb +def 
test__make_retry_timeout_kwargs_w_both(): + from google.cloud.datastore._http import _make_retry_timeout_kwargs - return _make_request_pb(request, request_pb_type) + retry = object() + timeout = 5.0 + expected = {"retry": retry, "timeout": timeout} + assert _make_retry_timeout_kwargs(retry, timeout) == expected - def test_w_empty_dict(self): - request = {} - foo = self._call_fut(request, Foo) +def test__make_request_pb_w_empty_dict(): + from google.cloud.datastore._http import _make_request_pb - self.assertIsInstance(foo, Foo) - self.assertIsNone(foo.bar) - self.assertIsNone(foo.baz) + request = {} - def test_w_partial_dict(self): - request = {"bar": "Bar"} + foo = _make_request_pb(request, Foo) - foo = self._call_fut(request, Foo) + assert isinstance(foo, Foo) + assert foo.bar is None + assert foo.baz is None - self.assertIsInstance(foo, Foo) - self.assertEqual(foo.bar, "Bar") - self.assertIsNone(foo.baz) - def test_w_complete_dict(self): - request = {"bar": "Bar", "baz": "Baz"} +def test__make_request_pb_w_partial_dict(): + from google.cloud.datastore._http import _make_request_pb - foo = self._call_fut(request, Foo) + request = {"bar": "Bar"} - self.assertIsInstance(foo, Foo) - self.assertEqual(foo.bar, "Bar") - self.assertEqual(foo.baz, "Baz") + foo = _make_request_pb(request, Foo) - def test_w_instance(self): - passed = Foo() + assert isinstance(foo, Foo) + assert foo.bar == "Bar" + assert foo.baz is None - foo = self._call_fut(passed, Foo) - self.assertIs(foo, passed) +def test__make_request_pb_w_complete_dict(): + from google.cloud.datastore._http import _make_request_pb + request = {"bar": "Bar", "baz": "Baz"} -class Test__request(unittest.TestCase): - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.datastore._http import _request + foo = _make_request_pb(request, Foo) - return _request(*args, **kwargs) + assert isinstance(foo, Foo) + assert foo.bar == "Bar" + assert foo.baz == "Baz" - def _helper(self, retry=None, timeout=None): - from google.cloud import _http as connection_module - project = "PROJECT" - method = "METHOD" - data = b"DATA" - base_url = "http://api-url" - user_agent = "USER AGENT" - client_info = _make_client_info(user_agent) - response_data = "CONTENT" +def test__make_request_pb_w_instance(): + from google.cloud.datastore._http import _make_request_pb - http = _make_requests_session([_make_response(content=response_data)]) + passed = Foo() - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + foo = _make_request_pb(passed, Foo) - response = self._call_fut( - http, project, method, data, base_url, client_info, **kwargs - ) - self.assertEqual(response, response_data) + assert foo is passed - # Check that the mocks were called as expected. 
- expected_url = _build_expected_url(base_url, project, method) - expected_headers = { - "Content-Type": "application/x-protobuf", - "User-Agent": user_agent, - connection_module.CLIENT_INFO_HEADER: user_agent, - } - if retry is not None: - retry.assert_called_once_with(http.request) +def _request_helper(retry=None, timeout=None): + from google.cloud import _http as connection_module + from google.cloud.datastore._http import _request - kwargs.pop("retry", None) - http.request.assert_called_once_with( - method="POST", - url=expected_url, - headers=expected_headers, - data=data, - **kwargs - ) + project = "PROJECT" + method = "METHOD" + data = b"DATA" + base_url = "http://api-url" + user_agent = "USER AGENT" + client_info = _make_client_info(user_agent) + response_data = "CONTENT" - def test_ok(self): - self._helper() + http = _make_requests_session([_make_response(content=response_data)]) - def test_w_retry(self): - retry = mock.MagicMock() - self._helper(retry=retry) + kwargs = _retry_timeout_kw(retry, timeout, http) - def test_w_timeout(self): - timeout = 5.0 - self._helper(timeout=timeout) + response = _request(http, project, method, data, base_url, client_info, **kwargs) + assert response == response_data - def test_failure(self): - from google.cloud.exceptions import BadRequest - from google.rpc import code_pb2 - from google.rpc import status_pb2 + # Check that the mocks were called as expected. + expected_url = _build_expected_url(base_url, project, method) + expected_headers = { + "Content-Type": "application/x-protobuf", + "User-Agent": user_agent, + connection_module.CLIENT_INFO_HEADER: user_agent, + } - project = "PROJECT" - method = "METHOD" - data = "DATA" - uri = "http://api-url" - user_agent = "USER AGENT" - client_info = _make_client_info(user_agent) + if retry is not None: + retry.assert_called_once_with(http.request) - error = status_pb2.Status() - error.message = "Entity value is indexed." - error.code = code_pb2.FAILED_PRECONDITION + kwargs.pop("retry", None) + http.request.assert_called_once_with( + method="POST", url=expected_url, headers=expected_headers, data=data, **kwargs + ) - http = _make_requests_session( - [_make_response(client.BAD_REQUEST, content=error.SerializeToString())] - ) - with self.assertRaises(BadRequest) as exc: - self._call_fut(http, project, method, data, uri, client_info) +def test__request_defaults(): + _request_helper() - expected_message = "400 Entity value is indexed." 
- self.assertEqual(str(exc.exception), expected_message) +def test__request_w_retry(): + retry = mock.MagicMock() + _request_helper(retry=retry) -class Test__rpc(unittest.TestCase): - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.datastore._http import _rpc - return _rpc(*args, **kwargs) +def test__request_w_timeout(): + timeout = 5.0 + _request_helper(timeout=timeout) - def _helper(self, retry=None, timeout=None): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - http = object() - project = "projectOK" - method = "beginTransaction" - base_url = "test.invalid" - client_info = _make_client_info() - request_pb = datastore_pb2.BeginTransactionRequest(project_id=project) +def test__request_failure(): + from google.cloud.exceptions import BadRequest + from google.cloud.datastore._http import _request + from google.rpc import code_pb2 + from google.rpc import status_pb2 - response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc") + project = "PROJECT" + method = "METHOD" + data = "DATA" + uri = "http://api-url" + user_agent = "USER AGENT" + client_info = _make_client_info(user_agent) - kwargs = _make_retry_timeout_kwargs(retry, timeout) + error = status_pb2.Status() + error.message = "Entity value is indexed." + error.code = code_pb2.FAILED_PRECONDITION - patch = mock.patch( - "google.cloud.datastore._http._request", - return_value=response_pb._pb.SerializeToString(), - ) - with patch as mock_request: - result = self._call_fut( - http, - project, - method, - base_url, - client_info, - request_pb, - datastore_pb2.BeginTransactionResponse, - **kwargs - ) - - self.assertEqual(result, response_pb._pb) - - mock_request.assert_called_once_with( + session = _make_requests_session( + [_make_response(http.client.BAD_REQUEST, content=error.SerializeToString())] + ) + + with pytest.raises(BadRequest) as exc: + _request(session, project, method, data, uri, client_info) + + expected_message = "400 Entity value is indexed." 
+ assert exc.match(expected_message) + + +def _rpc_helper(retry=None, timeout=None): + from google.cloud.datastore._http import _rpc + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + http = object() + project = "projectOK" + method = "beginTransaction" + base_url = "test.invalid" + client_info = _make_client_info() + request_pb = datastore_pb2.BeginTransactionRequest(project_id=project) + + response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc") + + kwargs = _retry_timeout_kw(retry, timeout) + + patch = mock.patch( + "google.cloud.datastore._http._request", + return_value=response_pb._pb.SerializeToString(), + ) + with patch as mock_request: + result = _rpc( http, project, method, - request_pb._pb.SerializeToString(), base_url, client_info, + request_pb, + datastore_pb2.BeginTransactionResponse, **kwargs ) - def test_defaults(self): - self._helper() + assert result == response_pb._pb - def test_w_retry(self): - retry = mock.MagicMock() - self._helper(retry=retry) + mock_request.assert_called_once_with( + http, + project, + method, + request_pb._pb.SerializeToString(), + base_url, + client_info, + **kwargs + ) - def test_w_timeout(self): - timeout = 5.0 - self._helper(timeout=timeout) +def test__rpc_defaults(): + _rpc_helper() -class TestHTTPDatastoreAPI(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.datastore._http import HTTPDatastoreAPI - return HTTPDatastoreAPI +def test__rpc_w_retry(): + retry = mock.MagicMock() + _rpc_helper(retry=retry) - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - @staticmethod - def _make_query_pb(kind): - from google.cloud.datastore_v1.types import query as query_pb2 +def test__rpc_w_timeout(): + timeout = 5.0 + _rpc_helper(timeout=timeout) - return query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)]) - def test_constructor(self): - client = object() - ds_api = self._make_one(client) - self.assertIs(ds_api.client, client) +def test_api_ctor(): + client = object() + ds_api = _make_http_datastore_api(client) + assert ds_api.client is client - def _lookup_single_helper( - self, - read_consistency=None, - transaction=None, - empty=True, - retry=None, - timeout=None, - ): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - project = "PROJECT" - key_pb = _make_key_pb(project) +def _lookup_single_helper( + read_consistency=None, transaction=None, empty=True, retry=None, timeout=None, +): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 - options_kw = {} - if read_consistency is not None: - options_kw["read_consistency"] = read_consistency - if transaction is not None: - options_kw["transaction"] = transaction + project = "PROJECT" + key_pb = _make_key_pb(project) - read_options = datastore_pb2.ReadOptions(**options_kw) + options_kw = {} + if read_consistency is not None: + options_kw["read_consistency"] = read_consistency + if transaction is not None: + options_kw["transaction"] = transaction - rsp_pb = datastore_pb2.LookupResponse() + read_options = datastore_pb2.ReadOptions(**options_kw) - if not empty: - entity = entity_pb2.Entity() - entity.key._pb.CopyFrom(key_pb._pb) - rsp_pb._pb.found.add(entity=entity._pb) + rsp_pb = datastore_pb2.LookupResponse() - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - 
client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - ds_api = self._make_one(client) - request = { - "project_id": project, - "keys": [key_pb], - "read_options": read_options, - } - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + if not empty: + entity = entity_pb2.Entity() + entity.key._pb.CopyFrom(key_pb._pb) + rsp_pb._pb.found.add(entity=entity._pb) - response = ds_api.lookup(request=request, **kwargs) + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) + ds_api = _make_http_datastore_api(client) + request = { + "project_id": project, + "keys": [key_pb], + "read_options": read_options, + } + kwargs = _retry_timeout_kw(retry, timeout, http) - self.assertEqual(response, rsp_pb._pb) + response = ds_api.lookup(request=request, **kwargs) - if empty: - self.assertEqual(len(response.found), 0) - else: - self.assertEqual(len(response.found), 1) + response == rsp_pb._pb - self.assertEqual(len(response.missing), 0) - self.assertEqual(len(response.deferred), 0) + if empty: + assert len(response.found) == 0 + else: + assert len(response.found) == 1 - uri = _build_expected_url(client._base_url, project, "lookup") - request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, - ) + assert len(response.missing) == 0 + assert len(response.deferred) == 0 - if retry is not None: - retry.assert_called_once_with(http.request) - - self.assertEqual(list(request.keys), [key_pb._pb]) - self.assertEqual(request.read_options, read_options._pb) - - def test_lookup_single_key_miss(self): - self._lookup_single_helper() - - def test_lookup_single_key_miss_w_read_consistency(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - self._lookup_single_helper(read_consistency=read_consistency) - - def test_lookup_single_key_miss_w_transaction(self): - transaction = b"TRANSACTION" - self._lookup_single_helper(transaction=transaction) - - def test_lookup_single_key_hit(self): - self._lookup_single_helper(empty=False) - - def test_lookup_single_key_hit_w_retry(self): - retry = mock.MagicMock() - self._lookup_single_helper(empty=False, retry=retry) - - def test_lookup_single_key_hit_w_timeout(self): - timeout = 5.0 - self._lookup_single_helper(empty=False, timeout=timeout) - - def _lookup_multiple_helper( - self, found=0, missing=0, deferred=0, retry=None, timeout=None, - ): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - - project = "PROJECT" - key_pb1 = _make_key_pb(project) - key_pb2 = _make_key_pb(project, id_=2345) - keys = [key_pb1, key_pb2] - read_options = datastore_pb2.ReadOptions() - - rsp_pb = datastore_pb2.LookupResponse() - - found_keys = [] - for i_found in range(found): - key = keys[i_found] - found_keys.append(key._pb) - entity = entity_pb2.Entity() - entity.key._pb.CopyFrom(key._pb) - rsp_pb._pb.found.add(entity=entity._pb) - - missing_keys = [] - for i_missing in range(missing): - key = keys[i_missing] - missing_keys.append(key._pb) - entity = entity_pb2.Entity() - entity.key._pb.CopyFrom(key._pb) - 
rsp_pb._pb.missing.add(entity=entity._pb) - - deferred_keys = [] - for i_deferred in range(deferred): - key = keys[i_deferred] - deferred_keys.append(key._pb) - rsp_pb._pb.deferred.append(key._pb) - - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - ds_api = self._make_one(client) - request = { - "project_id": project, - "keys": keys, - "read_options": read_options, - } - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + uri = _build_expected_url(client._base_url, project, "lookup") + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + ) + + if retry is not None: + retry.assert_called_once_with(http.request) - response = ds_api.lookup(request=request, **kwargs) + assert list(request.keys) == [key_pb._pb] + assert request.read_options == read_options._pb - self.assertEqual(response, rsp_pb._pb) - self.assertEqual([found.entity.key for found in response.found], found_keys) - self.assertEqual( - [missing.entity.key for missing in response.missing], missing_keys - ) - self.assertEqual(list(response.deferred), deferred_keys) +def test_api_lookup_single_key_miss(): + _lookup_single_helper() - uri = _build_expected_url(client._base_url, project, "lookup") - request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, - ) - self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb]) - self.assertEqual(request.read_options, read_options._pb) - - def test_lookup_multiple_keys_w_empty_response(self): - self._lookup_multiple_helper() - - def test_lookup_multiple_keys_w_retry(self): - retry = mock.MagicMock() - self._lookup_multiple_helper(retry=retry) - - def test_lookup_multiple_keys_w_timeout(self): - timeout = 5.0 - self._lookup_multiple_helper(timeout=timeout) - - def test_lookup_multiple_keys_w_found(self): - self._lookup_multiple_helper(found=2) - - def test_lookup_multiple_keys_w_missing(self): - self._lookup_multiple_helper(missing=2) - - def test_lookup_multiple_keys_w_deferred(self): - self._lookup_multiple_helper(deferred=2) - - def _run_query_helper( - self, - read_consistency=None, - transaction=None, - namespace=None, - found=0, - retry=None, - timeout=None, - ): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore_v1.types import query as query_pb2 - - project = "PROJECT" - kind = "Nonesuch" - query_pb = self._make_query_pb(kind) - - partition_kw = {"project_id": project} - if namespace is not None: - partition_kw["namespace_id"] = namespace - - partition_id = entity_pb2.PartitionId(**partition_kw) - - options_kw = {} - if read_consistency is not None: - options_kw["read_consistency"] = read_consistency - if transaction is not None: - options_kw["transaction"] = transaction - read_options = datastore_pb2.ReadOptions(**options_kw) - - cursor = b"\x00" - batch_kw = { - "entity_result_type": query_pb2.EntityResult.ResultType.FULL, - "end_cursor": cursor, - "more_results": query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, - } - if found: - batch_kw["entity_results"] = [ - query_pb2.EntityResult(entity=entity_pb2.Entity()) - ] * found - rsp_pb = datastore_pb2.RunQueryResponse( - batch=query_pb2.QueryResultBatch(**batch_kw) - ) - 
http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - ds_api = self._make_one(client) - request = { - "project_id": project, - "partition_id": partition_id, - "read_options": read_options, - "query": query_pb, - } - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) - - response = ds_api.run_query(request=request, **kwargs) - - self.assertEqual(response, rsp_pb._pb) - - uri = _build_expected_url(client._base_url, project, "runQuery") - request = _verify_protobuf_call( - http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout, - ) - self.assertEqual(request.partition_id, partition_id._pb) - self.assertEqual(request.query, query_pb._pb) - self.assertEqual(request.read_options, read_options._pb) +def test_api_lookup_single_key_miss_w_read_consistency(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - def test_run_query_simple(self): - self._run_query_helper() + read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + _lookup_single_helper(read_consistency=read_consistency) - def test_run_query_w_retry(self): - retry = mock.MagicMock() - self._run_query_helper(retry=retry) - def test_run_query_w_timeout(self): - timeout = 5.0 - self._run_query_helper(timeout=timeout) +def test_api_lookup_single_key_miss_w_transaction(): + transaction = b"TRANSACTION" + _lookup_single_helper(transaction=transaction) - def test_run_query_w_read_consistency(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - self._run_query_helper(read_consistency=read_consistency) +def test_api_lookup_single_key_hit(): + _lookup_single_helper(empty=False) - def test_run_query_w_transaction(self): - transaction = b"TRANSACTION" - self._run_query_helper(transaction=transaction) - def test_run_query_w_namespace_nonempty_result(self): - namespace = "NS" - self._run_query_helper(namespace=namespace, found=1) +def test_api_lookup_single_key_hit_w_retry(): + retry = mock.MagicMock() + _lookup_single_helper(empty=False, retry=retry) - def _begin_transaction_helper(self, options=None, retry=None, timeout=None): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - project = "PROJECT" - transaction = b"TRANSACTION" - rsp_pb = datastore_pb2.BeginTransactionResponse() - rsp_pb.transaction = transaction +def test_api_lookup_single_key_hit_w_timeout(): + timeout = 5.0 + _lookup_single_helper(empty=False, timeout=timeout) - # Create mock HTTP and client with response. - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - # Make request. 
- ds_api = self._make_one(client) - request = {"project_id": project} +def _lookup_multiple_helper( + found=0, missing=0, deferred=0, retry=None, timeout=None, +): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 - if options is not None: - request["transaction_options"] = options + project = "PROJECT" + key_pb1 = _make_key_pb(project) + key_pb2 = _make_key_pb(project, id_=2345) + keys = [key_pb1, key_pb2] + read_options = datastore_pb2.ReadOptions() - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + rsp_pb = datastore_pb2.LookupResponse() - response = ds_api.begin_transaction(request=request, **kwargs) + found_keys = [] + for i_found in range(found): + key = keys[i_found] + found_keys.append(key._pb) + entity = entity_pb2.Entity() + entity.key._pb.CopyFrom(key._pb) + rsp_pb._pb.found.add(entity=entity._pb) - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) + missing_keys = [] + for i_missing in range(missing): + key = keys[i_missing] + missing_keys.append(key._pb) + entity = entity_pb2.Entity() + entity.key._pb.CopyFrom(key._pb) + rsp_pb._pb.missing.add(entity=entity._pb) - uri = _build_expected_url(client._base_url, project, "beginTransaction") - request = _verify_protobuf_call( - http, - uri, - datastore_pb2.BeginTransactionRequest(), - retry=retry, - timeout=timeout, - ) + deferred_keys = [] + for i_deferred in range(deferred): + key = keys[i_deferred] + deferred_keys.append(key._pb) + rsp_pb._pb.deferred.append(key._pb) - def test_begin_transaction_wo_options(self): - self._begin_transaction_helper() + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) + ds_api = _make_http_datastore_api(client) + request = { + "project_id": project, + "keys": keys, + "read_options": read_options, + } + kwargs = _retry_timeout_kw(retry, timeout, http) - def test_begin_transaction_w_options(self): - from google.cloud.datastore_v1.types import TransactionOptions + response = ds_api.lookup(request=request, **kwargs) - read_only = TransactionOptions.ReadOnly._meta.pb() - options = TransactionOptions(read_only=read_only) - self._begin_transaction_helper(options=options) + assert response == rsp_pb._pb - def test_begin_transaction_w_retry(self): - retry = mock.MagicMock() - self._begin_transaction_helper(retry=retry) + assert [found.entity.key for found in response.found] == found_keys + assert [missing.entity.key for missing in response.missing] == missing_keys + assert list(response.deferred) == deferred_keys - def test_begin_transaction_w_timeout(self): - timeout = 5.0 - self._begin_transaction_helper(timeout=timeout) + uri = _build_expected_url(client._base_url, project, "lookup") + request = _verify_protobuf_call( + http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + ) + assert list(request.keys) == [key_pb1._pb, key_pb2._pb] + assert request.read_options == read_options._pb - def _commit_helper(self, transaction=None, retry=None, timeout=None): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.helpers import _new_value_pb - project = "PROJECT" - key_pb = _make_key_pb(project) - rsp_pb = datastore_pb2.CommitResponse() - req_pb = datastore_pb2.CommitRequest() - mutation = 
req_pb._pb.mutations.add() - insert = mutation.upsert - insert.key.CopyFrom(key_pb._pb) - value_pb = _new_value_pb(insert, "foo") - value_pb.string_value = u"Foo" +def test_api_lookup_multiple_keys_w_empty_response(): + _lookup_multiple_helper() - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - rq_class = datastore_pb2.CommitRequest - ds_api = self._make_one(client) +def test_api_lookup_multiple_keys_w_retry(): + retry = mock.MagicMock() + _lookup_multiple_helper(retry=retry) - request = {"project_id": project, "mutations": [mutation]} - if transaction is not None: - request["transaction"] = transaction - mode = request["mode"] = rq_class.Mode.TRANSACTIONAL - else: - mode = request["mode"] = rq_class.Mode.NON_TRANSACTIONAL +def test_api_lookup_multiple_keys_w_timeout(): + timeout = 5.0 + _lookup_multiple_helper(timeout=timeout) - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) - result = ds_api.commit(request=request, **kwargs) +def test_api_lookup_multiple_keys_w_found(): + _lookup_multiple_helper(found=2) - self.assertEqual(result, rsp_pb._pb) - uri = _build_expected_url(client._base_url, project, "commit") - request = _verify_protobuf_call( - http, uri, rq_class(), retry=retry, timeout=timeout, - ) - self.assertEqual(list(request.mutations), [mutation]) - self.assertEqual(request.mode, mode) +def test_api_lookup_multiple_keys_w_missing(): + _lookup_multiple_helper(missing=2) - if transaction is not None: - self.assertEqual(request.transaction, transaction) - else: - self.assertEqual(request.transaction, b"") - def test_commit_wo_transaction(self): - self._commit_helper() +def test_api_lookup_multiple_keys_w_deferred(): + _lookup_multiple_helper(deferred=2) - def test_commit_w_transaction(self): - transaction = b"xact" - self._commit_helper(transaction=transaction) +def _run_query_helper( + read_consistency=None, + transaction=None, + namespace=None, + found=0, + retry=None, + timeout=None, +): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 - def test_commit_w_retry(self): - retry = mock.MagicMock() - self._commit_helper(retry=retry) + project = "PROJECT" + kind = "Nonesuch" + query_pb = query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)]) - def test_commit_w_timeout(self): - timeout = 5.0 - self._commit_helper(timeout=timeout) + partition_kw = {"project_id": project} + if namespace is not None: + partition_kw["namespace_id"] = namespace - def _rollback_helper(self, retry=None, timeout=None): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 + partition_id = entity_pb2.PartitionId(**partition_kw) - project = "PROJECT" - transaction = b"xact" - rsp_pb = datastore_pb2.RollbackResponse() + options_kw = {} + if read_consistency is not None: + options_kw["read_consistency"] = read_consistency + if transaction is not None: + options_kw["transaction"] = transaction + read_options = datastore_pb2.ReadOptions(**options_kw) - # Create mock HTTP and client with response. 
- http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) + cursor = b"\x00" + batch_kw = { + "entity_result_type": query_pb2.EntityResult.ResultType.FULL, + "end_cursor": cursor, + "more_results": query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, + } + if found: + batch_kw["entity_results"] = [ + query_pb2.EntityResult(entity=entity_pb2.Entity()) + ] * found + rsp_pb = datastore_pb2.RunQueryResponse( + batch=query_pb2.QueryResultBatch(**batch_kw) + ) + + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) + ds_api = _make_http_datastore_api(client) + request = { + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "query": query_pb, + } + kwargs = _retry_timeout_kw(retry, timeout, http) - # Make request. - ds_api = self._make_one(client) - request = {"project_id": project, "transaction": transaction} - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + response = ds_api.run_query(request=request, **kwargs) - response = ds_api.rollback(request=request, **kwargs) + assert response == rsp_pb._pb - # Check the result and verify the callers. - self.assertEqual(response, rsp_pb._pb) + uri = _build_expected_url(client._base_url, project, "runQuery") + request = _verify_protobuf_call( + http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout, + ) + assert request.partition_id == partition_id._pb + assert request.query == query_pb._pb + assert request.read_options == read_options._pb - uri = _build_expected_url(client._base_url, project, "rollback") - request = _verify_protobuf_call( - http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout, - ) - self.assertEqual(request.transaction, transaction) - def test_rollback_ok(self): - self._rollback_helper() +def test_api_run_query_simple(): + _run_query_helper() - def test_rollback_w_retry(self): - retry = mock.MagicMock() - self._rollback_helper(retry=retry) - def test_rollback_w_timeout(self): - timeout = 5.0 - self._rollback_helper(timeout=timeout) +def test_api_run_query_w_retry(): + retry = mock.MagicMock() + _run_query_helper(retry=retry) - def _allocate_ids_helper(self, count=0, retry=None, timeout=None): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - project = "PROJECT" - before_key_pbs = [] - after_key_pbs = [] - rsp_pb = datastore_pb2.AllocateIdsResponse() +def test_api_run_query_w_timeout(): + timeout = 5.0 + _run_query_helper(timeout=timeout) - for i_count in range(count): - requested = _make_key_pb(project, id_=None) - before_key_pbs.append(requested) - allocated = _make_key_pb(project, id_=i_count) - after_key_pbs.append(allocated) - rsp_pb._pb.keys.add().CopyFrom(allocated._pb) - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - ds_api = self._make_one(client) +def test_api_run_query_w_read_consistency(): + from google.cloud.datastore_v1.types import datastore as 
datastore_pb2 - request = {"project_id": project, "keys": before_key_pbs} - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + _run_query_helper(read_consistency=read_consistency) - response = ds_api.allocate_ids(request=request, **kwargs) - self.assertEqual(response, rsp_pb._pb) - self.assertEqual(list(response.keys), [i._pb for i in after_key_pbs]) +def test_api_run_query_w_transaction(): + transaction = b"TRANSACTION" + _run_query_helper(transaction=transaction) - uri = _build_expected_url(client._base_url, project, "allocateIds") - request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, - ) - self.assertEqual(len(request.keys), len(before_key_pbs)) - for key_before, key_after in zip(before_key_pbs, request.keys): - self.assertEqual(key_before, key_after) - def test_allocate_ids_empty(self): - self._allocate_ids_helper() +def test_api_run_query_w_namespace_nonempty_result(): + namespace = "NS" + _run_query_helper(namespace=namespace, found=1) - def test_allocate_ids_non_empty(self): - self._allocate_ids_helper(count=2) - def test_allocate_ids_w_retry(self): - retry = mock.MagicMock() - self._allocate_ids_helper(retry=retry) +def _begin_transaction_helper(options=None, retry=None, timeout=None): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - def test_allocate_ids_w_timeout(self): - timeout = 5.0 - self._allocate_ids_helper(timeout=timeout) + project = "PROJECT" + transaction = b"TRANSACTION" + rsp_pb = datastore_pb2.BeginTransactionResponse() + rsp_pb.transaction = transaction - def _reserve_ids_helper(self, count=0, retry=None, timeout=None): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 + # Create mock HTTP and client with response. + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) - project = "PROJECT" - before_key_pbs = [] - rsp_pb = datastore_pb2.ReserveIdsResponse() + # Make request. + ds_api = _make_http_datastore_api(client) + request = {"project_id": project} - for i_count in range(count): - requested = _make_key_pb(project, id_=i_count) - before_key_pbs.append(requested) + if options is not None: + request["transaction_options"] = options - http = _make_requests_session( - [_make_response(content=rsp_pb._pb.SerializeToString())] - ) - client_info = _make_client_info() - client = mock.Mock( - _http=http, - _base_url="test.invalid", - _client_info=client_info, - spec=["_http", "_base_url", "_client_info"], - ) - ds_api = self._make_one(client) + kwargs = _retry_timeout_kw(retry, timeout, http) - request = {"project_id": project, "keys": before_key_pbs} - kwargs = _make_retry_timeout_kwargs(retry, timeout, http) + response = ds_api.begin_transaction(request=request, **kwargs) - response = ds_api.reserve_ids(request=request, **kwargs) + # Check the result and verify the callers. 
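+    # (The mocked session was primed with the serialized rsp_pb, and the
+    # HTTP API returns the parsed protobuf, so direct equality suffices.)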
+ assert response == rsp_pb._pb - self.assertEqual(response, rsp_pb._pb) + uri = _build_expected_url(client._base_url, project, "beginTransaction") + request = _verify_protobuf_call( + http, + uri, + datastore_pb2.BeginTransactionRequest(), + retry=retry, + timeout=timeout, + ) - uri = _build_expected_url(client._base_url, project, "reserveIds") - request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, - ) - self.assertEqual(len(request.keys), len(before_key_pbs)) - for key_before, key_after in zip(before_key_pbs, request.keys): - self.assertEqual(key_before, key_after) - def test_reserve_ids_empty(self): - self._reserve_ids_helper() +def test_api_begin_transaction_wo_options(): + _begin_transaction_helper() + + +def test_api_begin_transaction_w_options(): + from google.cloud.datastore_v1.types import TransactionOptions + + read_only = TransactionOptions.ReadOnly._meta.pb() + options = TransactionOptions(read_only=read_only) + _begin_transaction_helper(options=options) + + +def test_api_begin_transaction_w_retry(): + retry = mock.MagicMock() + _begin_transaction_helper(retry=retry) + + +def test_api_begin_transaction_w_timeout(): + timeout = 5.0 + _begin_transaction_helper(timeout=timeout) + + +def _commit_helper(transaction=None, retry=None, timeout=None): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + project = "PROJECT" + key_pb = _make_key_pb(project) + rsp_pb = datastore_pb2.CommitResponse() + req_pb = datastore_pb2.CommitRequest() + mutation = req_pb._pb.mutations.add() + insert = mutation.upsert + insert.key.CopyFrom(key_pb._pb) + value_pb = _new_value_pb(insert, "foo") + value_pb.string_value = u"Foo" - def test_reserve_ids_non_empty(self): - self._reserve_ids_helper(count=2) + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) - def test_reserve_ids_w_retry(self): - retry = mock.MagicMock() - self._reserve_ids_helper(retry=retry) + rq_class = datastore_pb2.CommitRequest + ds_api = _make_http_datastore_api(client) - def test_reserve_ids_w_timeout(self): - timeout = 5.0 - self._reserve_ids_helper(timeout=timeout) + request = {"project_id": project, "mutations": [mutation]} + if transaction is not None: + request["transaction"] = transaction + mode = request["mode"] = rq_class.Mode.TRANSACTIONAL + else: + mode = request["mode"] = rq_class.Mode.NON_TRANSACTIONAL + + kwargs = _retry_timeout_kw(retry, timeout, http) + + result = ds_api.commit(request=request, **kwargs) + + assert result == rsp_pb._pb + + uri = _build_expected_url(client._base_url, project, "commit") + request = _verify_protobuf_call( + http, uri, rq_class(), retry=retry, timeout=timeout, + ) + assert list(request.mutations) == [mutation] + assert request.mode == mode + + if transaction is not None: + assert request.transaction == transaction + else: + assert request.transaction == b"" + + +def test_api_commit_wo_transaction(): + _commit_helper() + + +def test_api_commit_w_transaction(): + transaction = b"xact" + + _commit_helper(transaction=transaction) + + +def test_api_commit_w_retry(): + retry = mock.MagicMock() + _commit_helper(retry=retry) + + +def test_api_commit_w_timeout(): + timeout = 5.0 + _commit_helper(timeout=timeout) + + +def 
_rollback_helper(retry=None, timeout=None): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + transaction = b"xact" + rsp_pb = datastore_pb2.RollbackResponse() + + # Create mock HTTP and client with response. + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) + + # Make request. + ds_api = _make_http_datastore_api(client) + request = {"project_id": project, "transaction": transaction} + kwargs = _retry_timeout_kw(retry, timeout, http) + + response = ds_api.rollback(request=request, **kwargs) + + # Check the result and verify the callers. + assert response == rsp_pb._pb + + uri = _build_expected_url(client._base_url, project, "rollback") + request = _verify_protobuf_call( + http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout, + ) + assert request.transaction == transaction + + +def test_api_rollback_ok(): + _rollback_helper() + + +def test_api_rollback_w_retry(): + retry = mock.MagicMock() + _rollback_helper(retry=retry) + + +def test_api_rollback_w_timeout(): + timeout = 5.0 + _rollback_helper(timeout=timeout) + + +def _allocate_ids_helper(count=0, retry=None, timeout=None): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + before_key_pbs = [] + after_key_pbs = [] + rsp_pb = datastore_pb2.AllocateIdsResponse() + + for i_count in range(count): + requested = _make_key_pb(project, id_=None) + before_key_pbs.append(requested) + allocated = _make_key_pb(project, id_=i_count) + after_key_pbs.append(allocated) + rsp_pb._pb.keys.add().CopyFrom(allocated._pb) + + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) + ds_api = _make_http_datastore_api(client) + + request = {"project_id": project, "keys": before_key_pbs} + kwargs = _retry_timeout_kw(retry, timeout, http) + + response = ds_api.allocate_ids(request=request, **kwargs) -def _make_response(status=client.OK, content=b"", headers={}): + assert response == rsp_pb._pb + assert list(response.keys) == [i._pb for i in after_key_pbs] + + uri = _build_expected_url(client._base_url, project, "allocateIds") + request = _verify_protobuf_call( + http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + ) + assert len(request.keys) == len(before_key_pbs) + for key_before, key_after in zip(before_key_pbs, request.keys): + assert key_before == key_after + + +def test_api_allocate_ids_empty(): + _allocate_ids_helper() + + +def test_api_allocate_ids_non_empty(): + _allocate_ids_helper(count=2) + + +def test_api_allocate_ids_w_retry(): + retry = mock.MagicMock() + _allocate_ids_helper(retry=retry) + + +def test_api_allocate_ids_w_timeout(): + timeout = 5.0 + _allocate_ids_helper(timeout=timeout) + + +def _reserve_ids_helper(count=0, retry=None, timeout=None): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + before_key_pbs = [] + rsp_pb = datastore_pb2.ReserveIdsResponse() + + for i_count in range(count): + requested = _make_key_pb(project, id_=i_count) + before_key_pbs.append(requested) + + http = _make_requests_session( + 
[_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+    ds_api = _make_http_datastore_api(client)
+
+    request = {"project_id": project, "keys": before_key_pbs}
+    kwargs = _retry_timeout_kw(retry, timeout, http)
+
+    response = ds_api.reserve_ids(request=request, **kwargs)
+
+    assert response == rsp_pb._pb
+
+    uri = _build_expected_url(client._base_url, project, "reserveIds")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout,
+    )
+    assert len(request.keys) == len(before_key_pbs)
+    for key_before, key_after in zip(before_key_pbs, request.keys):
+        assert key_before == key_after
+
+
+def test_api_reserve_ids_empty():
+    _reserve_ids_helper()
+
+
+def test_api_reserve_ids_non_empty():
+    _reserve_ids_helper(count=2)
+
+
+def test_api_reserve_ids_w_retry():
+    retry = mock.MagicMock()
+    _reserve_ids_helper(retry=retry)
+
+
+def test_api_reserve_ids_w_timeout():
+    timeout = 5.0
+    _reserve_ids_helper(timeout=timeout)
+
+
+def _make_http_datastore_api(*args, **kwargs):
+    from google.cloud.datastore._http import HTTPDatastoreAPI
+
+    return HTTPDatastoreAPI(*args, **kwargs)
+
+
+def _make_response(status=http.client.OK, content=b"", headers={}):
     response = requests.Response()
     response.status_code = status
     response._content = content
@@ -906,7 +924,7 @@ def _verify_protobuf_call(http, expected_url, pb, retry=None, timeout=None):
     return pb
 
 
-def _make_retry_timeout_kwargs(retry, timeout, http=None):
+def _retry_timeout_kw(retry, timeout, http=None):
     kwargs = {}
 
     if retry is not None:
diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py
index ead00623c16c..fffbefa2b2d9 100644
--- a/packages/google-cloud-datastore/tests/unit/test_batch.py
+++ b/packages/google-cloud-datastore/tests/unit/test_batch.py
@@ -12,469 +12,487 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
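The test_batch.py rewrite below applies the same mechanical recipe as the test__http.py changes above: each unittest.TestCase class dissolves into module-level functions backed by small _make_* factories, self.assert* calls become bare assert statements, and assertRaises becomes the pytest.raises context manager. A minimal, self-contained sketch of that recipe, using a hypothetical Thing class rather than any code from this patch:

import pytest


class Thing:
    """Hypothetical stand-in for Batch or Client; not part of this patch."""

    def __init__(self, project):
        if project is None:
            raise ValueError("project is required")
        self.project = project


# Before, unittest style:
#
#     class TestThing(unittest.TestCase):
#         def _make_one(self, project):
#             return Thing(project)
#
#         def test_ctor(self):
#             self.assertEqual(self._make_one("PROJECT").project, "PROJECT")
#
#         def test_ctor_missing_project(self):
#             self.assertRaises(ValueError, self._make_one, None)


# After, pytest style: a module-level factory plus plain test functions.
def _make_thing(project):
    return Thing(project)


def test_thing_ctor():
    assert _make_thing("PROJECT").project == "PROJECT"


def test_thing_ctor_missing_project():
    with pytest.raises(ValueError):
        _make_thing(None)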
-import unittest - import mock +import pytest -class TestBatch(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.datastore.batch import Batch - - return Batch - - def _make_one(self, client): - return self._get_target_class()(client) - - def test_ctor(self): - project = "PROJECT" - namespace = "NAMESPACE" - client = _Client(project, namespace=namespace) - batch = self._make_one(client) - - self.assertEqual(batch.project, project) - self.assertIs(batch._client, client) - self.assertEqual(batch.namespace, namespace) - self.assertIsNone(batch._id) - self.assertEqual(batch._status, batch._INITIAL) - self.assertEqual(batch._mutations, []) - self.assertEqual(batch._partial_key_entities, []) - - def test_current(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - client = _Client(project) - batch1 = self._make_one(client) - batch2 = self._make_one(client) - self.assertIsNone(batch1.current()) - self.assertIsNone(batch2.current()) - with batch1: - self.assertIs(batch1.current(), batch1) - self.assertIs(batch2.current(), batch1) - with batch2: - self.assertIs(batch1.current(), batch2) - self.assertIs(batch2.current(), batch2) - self.assertIs(batch1.current(), batch1) - self.assertIs(batch2.current(), batch1) - self.assertIsNone(batch1.current()) - self.assertIsNone(batch2.current()) - - commit_method = client._datastore_api.commit - self.assertEqual(commit_method.call_count, 2) - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": None, - } - ) - - def test_put_entity_wo_key(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) +def _make_batch(client): + from google.cloud.datastore.batch import Batch - batch.begin() - self.assertRaises(ValueError, batch.put, _Entity()) + return Batch(client) - def test_put_entity_wrong_status(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - entity = _Entity() - entity.key = _Key("OTHER") - self.assertEqual(batch._status, batch._INITIAL) - self.assertRaises(ValueError, batch.put, entity) +def test_batch_ctor(): + project = "PROJECT" + namespace = "NAMESPACE" + client = _Client(project, namespace=namespace) + batch = _make_batch(client) - def test_put_entity_w_key_wrong_project(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - entity = _Entity() - entity.key = _Key("OTHER") + assert batch.project == project + assert batch._client is client + assert batch.namespace == namespace + assert batch._id is None + assert batch._status == batch._INITIAL + assert batch._mutations == [] + assert batch._partial_key_entities == [] - batch.begin() - self.assertRaises(ValueError, batch.put, entity) - def test_put_entity_w_partial_key(self): - project = "PROJECT" - properties = {"foo": "bar"} - client = _Client(project) - batch = self._make_one(client) - entity = _Entity(properties) - key = entity.key = _Key(project) - key._id = None +def test_batch_current(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - batch.begin() + project = "PROJECT" + client = _Client(project) + batch1 = _make_batch(client) + batch2 = _make_batch(client) + + assert batch1.current() is None + assert batch2.current() is None + + with batch1: + assert batch1.current() is batch1 + assert batch2.current() is batch1 + + with batch2: + assert 
batch1.current() is batch2 + assert batch2.current() is batch2 + + assert batch1.current() is batch1 + assert batch2.current() is batch1 + + assert batch1.current() is None + assert batch2.current() is None + + commit_method = client._datastore_api.commit + assert commit_method.call_count == 2 + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + } + ) + + +def test_batch_put_w_entity_wo_key(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + entity = _Entity() + + batch.begin() + with pytest.raises(ValueError): batch.put(entity) - mutated_entity = _mutated_pb(self, batch.mutations, "insert") - self.assertEqual(mutated_entity.key, key._key) - self.assertEqual(batch._partial_key_entities, [entity]) - def test_put_entity_w_completed_key(self): - project = "PROJECT" - properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []} - client = _Client(project) - batch = self._make_one(client) - entity = _Entity(properties) - entity.exclude_from_indexes = ("baz", "spam") - key = entity.key = _Key(project) +def test_batch_put_w_wrong_status(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + entity = _Entity() + entity.key = _Key(project=project) - batch.begin() + assert batch._status == batch._INITIAL + with pytest.raises(ValueError): batch.put(entity) - mutated_entity = _mutated_pb(self, batch.mutations, "upsert") - self.assertEqual(mutated_entity.key, key._key) - - prop_dict = dict(mutated_entity.properties.items()) - self.assertEqual(len(prop_dict), 4) - self.assertFalse(prop_dict["foo"].exclude_from_indexes) - self.assertTrue(prop_dict["baz"].exclude_from_indexes) - self.assertFalse(prop_dict["spam"].exclude_from_indexes) - spam_values = prop_dict["spam"].array_value.values - self.assertTrue(spam_values[0].exclude_from_indexes) - self.assertTrue(spam_values[1].exclude_from_indexes) - self.assertTrue(spam_values[2].exclude_from_indexes) - self.assertTrue("frotz" in prop_dict) - - def test_delete_wrong_status(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - key = _Key(project) - key._id = None - - self.assertEqual(batch._status, batch._INITIAL) - self.assertRaises(ValueError, batch.delete, key) - - def test_delete_w_partial_key(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - key = _Key(project) - key._id = None - batch.begin() - self.assertRaises(ValueError, batch.delete, key) +def test_batch_put_w_key_wrong_project(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + entity = _Entity() + entity.key = _Key(project="OTHER") - def test_delete_w_key_wrong_project(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - key = _Key("OTHER") + batch.begin() + with pytest.raises(ValueError): + batch.put(entity) - batch.begin() - self.assertRaises(ValueError, batch.delete, key) - def test_delete_w_completed_key(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - key = _Key(project) +def test_batch_put_w_entity_w_partial_key(): + project = "PROJECT" + properties = {"foo": "bar"} + client = _Client(project) + batch = _make_batch(client) + entity = _Entity(properties) + key = entity.key = _Key(project) + key._id = None - batch.begin() + batch.begin() + batch.put(entity) + + mutated_entity = 
_mutated_pb(batch.mutations, "insert") + assert mutated_entity.key == key._key + assert batch._partial_key_entities == [entity] + + +def test_batch_put_w_entity_w_completed_key(): + project = "PROJECT" + properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []} + client = _Client(project) + batch = _make_batch(client) + entity = _Entity(properties) + entity.exclude_from_indexes = ("baz", "spam") + key = entity.key = _Key(project) + + batch.begin() + batch.put(entity) + + mutated_entity = _mutated_pb(batch.mutations, "upsert") + assert mutated_entity.key == key._key + + prop_dict = dict(mutated_entity.properties.items()) + assert len(prop_dict) == 4 + assert not prop_dict["foo"].exclude_from_indexes + assert prop_dict["baz"].exclude_from_indexes + assert not prop_dict["spam"].exclude_from_indexes + + spam_values = prop_dict["spam"].array_value.values + assert spam_values[0].exclude_from_indexes + assert spam_values[1].exclude_from_indexes + assert spam_values[2].exclude_from_indexes + assert "frotz" in prop_dict + + +def test_batch_delete_w_wrong_status(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + key = _Key(project=project) + key._id = None + + assert batch._status == batch._INITIAL + + with pytest.raises(ValueError): batch.delete(key) - mutated_key = _mutated_pb(self, batch.mutations, "delete") - self.assertEqual(mutated_key, key._key) - def test_begin(self): - project = "PROJECT" - client = _Client(project, None) - batch = self._make_one(client) - self.assertEqual(batch._status, batch._INITIAL) - batch.begin() - self.assertEqual(batch._status, batch._IN_PROGRESS) - - def test_begin_fail(self): - project = "PROJECT" - client = _Client(project, None) - batch = self._make_one(client) - batch._status = batch._IN_PROGRESS - with self.assertRaises(ValueError): - batch.begin() - - def test_rollback(self): - project = "PROJECT" - client = _Client(project, None) - batch = self._make_one(client) +def test_batch_delete_w_partial_key(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + key = _Key(project=project) + key._id = None + + batch.begin() + + with pytest.raises(ValueError): + batch.delete(key) + + +def test_batch_delete_w_key_wrong_project(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + key = _Key(project="OTHER") + + batch.begin() + + with pytest.raises(ValueError): + batch.delete(key) + + +def test_batch_delete_w_completed_key(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + key = _Key(project) + + batch.begin() + batch.delete(key) + + mutated_key = _mutated_pb(batch.mutations, "delete") + assert mutated_key == key._key + + +def test_batch_begin_w_wrong_status(): + project = "PROJECT" + client = _Client(project, None) + batch = _make_batch(client) + batch._status = batch._IN_PROGRESS + + with pytest.raises(ValueError): batch.begin() - self.assertEqual(batch._status, batch._IN_PROGRESS) + + +def test_batch_begin(): + project = "PROJECT" + client = _Client(project, None) + batch = _make_batch(client) + assert batch._status == batch._INITIAL + + batch.begin() + + assert batch._status == batch._IN_PROGRESS + + +def test_batch_rollback_w_wrong_status(): + project = "PROJECT" + client = _Client(project, None) + batch = _make_batch(client) + assert batch._status == batch._INITIAL + + with pytest.raises(ValueError): batch.rollback() - self.assertEqual(batch._status, batch._ABORTED) - def test_rollback_wrong_status(self): - project = 
"PROJECT" - client = _Client(project, None) - batch = self._make_one(client) - self.assertEqual(batch._status, batch._INITIAL) - self.assertRaises(ValueError, batch.rollback) +def test_batch_rollback(): + project = "PROJECT" + client = _Client(project, None) + batch = _make_batch(client) + batch.begin() + assert batch._status == batch._IN_PROGRESS - def test_commit(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 + batch.rollback() - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) + assert batch._status == batch._ABORTED - self.assertEqual(batch._status, batch._INITIAL) - batch.begin() - self.assertEqual(batch._status, batch._IN_PROGRESS) - batch.commit() - self.assertEqual(batch._status, batch._FINISHED) - - commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": None, - } - ) - - def test_commit_w_timeout(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - timeout = 100000 - - self.assertEqual(batch._status, batch._INITIAL) - batch.begin() - self.assertEqual(batch._status, batch._IN_PROGRESS) - batch.commit(timeout=timeout) - self.assertEqual(batch._status, batch._FINISHED) - - commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": None, - }, - timeout=timeout, - ) - - def test_commit_w_retry(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - retry = mock.Mock() - - self.assertEqual(batch._status, batch._INITIAL) - batch.begin() - self.assertEqual(batch._status, batch._IN_PROGRESS) - batch.commit(retry=retry) - self.assertEqual(batch._status, batch._FINISHED) - - commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": None, - }, - retry=retry, - ) - - def test_commit_wrong_status(self): - project = "PROJECT" - client = _Client(project) - batch = self._make_one(client) - - self.assertEqual(batch._status, batch._INITIAL) - self.assertRaises(ValueError, batch.commit) - - def test_commit_w_partial_key_entities(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - new_id = 1234 - ds_api = _make_datastore_api(new_id) - client = _Client(project, datastore_api=ds_api) - batch = self._make_one(client) - entity = _Entity({}) - key = entity.key = _Key(project) - key._id = None - batch._partial_key_entities.append(entity) - - self.assertEqual(batch._status, batch._INITIAL) - batch.begin() - self.assertEqual(batch._status, batch._IN_PROGRESS) + +def test_batch_commit_wrong_status(): + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + assert batch._status == batch._INITIAL + + with pytest.raises(ValueError): batch.commit() - self.assertEqual(batch._status, batch._FINISHED) - - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - ds_api.commit.assert_called_once_with( - request={ - "project_id": project, - 
"mode": mode, - "mutations": [], - "transaction": None, - } - ) - self.assertFalse(entity.key.is_partial) - self.assertEqual(entity.key._id, new_id) - - def test_as_context_mgr_wo_error(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - properties = {"foo": "bar"} - entity = _Entity(properties) - key = entity.key = _Key(project) - - client = _Client(project) - self.assertEqual(list(client._batches), []) - - with self._make_one(client) as batch: - self.assertEqual(list(client._batches), [batch]) + + +def _batch_commit_helper(timeout=None, retry=None): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + client = _Client(project) + batch = _make_batch(client) + assert batch._status == batch._INITIAL + + batch.begin() + assert batch._status == batch._IN_PROGRESS + + kwargs = {} + + if timeout is not None: + kwargs["timeout"] = timeout + + if retry is not None: + kwargs["retry"] = retry + + batch.commit(**kwargs) + assert batch._status == batch._FINISHED + + commit_method = client._datastore_api.commit + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + }, + **kwargs + ) + + +def test_batch_commit(): + _batch_commit_helper() + + +def test_batch_commit_w_timeout(): + timeout = 100000 + _batch_commit_helper(timeout=timeout) + + +def test_batch_commit_w_retry(): + retry = mock.Mock(spec=[]) + _batch_commit_helper(retry=retry) + + +def test_batch_commit_w_partial_key_entity(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + new_id = 1234 + ds_api = _make_datastore_api(new_id) + client = _Client(project, datastore_api=ds_api) + batch = _make_batch(client) + entity = _Entity({}) + key = entity.key = _Key(project) + key._id = None + batch._partial_key_entities.append(entity) + assert batch._status == batch._INITIAL + + batch.begin() + assert batch._status == batch._IN_PROGRESS + + batch.commit() + assert batch._status == batch._FINISHED + + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + ds_api.commit.assert_called_once_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": None, + } + ) + assert not entity.key.is_partial + assert entity.key._id == new_id + + +def test_batch_as_context_mgr_wo_error(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + properties = {"foo": "bar"} + entity = _Entity(properties) + key = entity.key = _Key(project) + + client = _Client(project) + assert list(client._batches) == [] + + with _make_batch(client) as batch: + assert list(client._batches) == [batch] + batch.put(entity) + + assert list(client._batches) == [] + + mutated_entity = _mutated_pb(batch.mutations, "upsert") + assert mutated_entity.key == key._key + + commit_method = client._datastore_api.commit + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": batch.mutations, + "transaction": None, + } + ) + + +def test_batch_as_context_mgr_nested(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + properties = {"foo": "bar"} + entity1 = _Entity(properties) + key1 = entity1.key = _Key(project) + entity2 = _Entity(properties) + key2 = entity2.key = _Key(project) + + client 
= _Client(project) + assert list(client._batches) == [] + + with _make_batch(client) as batch1: + assert list(client._batches) == [batch1] + batch1.put(entity1) + + with _make_batch(client) as batch2: + assert list(client._batches) == [batch2, batch1] + batch2.put(entity2) + + assert list(client._batches) == [batch1] + + assert list(client._batches) == [] + + mutated_entity1 = _mutated_pb(batch1.mutations, "upsert") + assert mutated_entity1.key == key1._key + + mutated_entity2 = _mutated_pb(batch2.mutations, "upsert") + assert mutated_entity2.key == key2._key + + commit_method = client._datastore_api.commit + assert commit_method.call_count == 2 + + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": batch1.mutations, + "transaction": None, + } + ) + commit_method.assert_called_with( + request={ + "project_id": project, + "mode": mode, + "mutations": batch2.mutations, + "transaction": None, + } + ) + + +def test_batch_as_context_mgr_w_error(): + project = "PROJECT" + properties = {"foo": "bar"} + entity = _Entity(properties) + key = entity.key = _Key(project) + + client = _Client(project) + assert list(client._batches) == [] + + try: + with _make_batch(client) as batch: + assert list(client._batches) == [batch] batch.put(entity) - self.assertEqual(list(client._batches), []) - - mutated_entity = _mutated_pb(self, batch.mutations, "upsert") - self.assertEqual(mutated_entity.key, key._key) - commit_method = client._datastore_api.commit - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": batch.mutations, - "transaction": None, - } - ) - - def test_as_context_mgr_nested(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - properties = {"foo": "bar"} - entity1 = _Entity(properties) - key1 = entity1.key = _Key(project) - entity2 = _Entity(properties) - key2 = entity2.key = _Key(project) - - client = _Client(project) - self.assertEqual(list(client._batches), []) - - with self._make_one(client) as batch1: - self.assertEqual(list(client._batches), [batch1]) - batch1.put(entity1) - with self._make_one(client) as batch2: - self.assertEqual(list(client._batches), [batch2, batch1]) - batch2.put(entity2) - - self.assertEqual(list(client._batches), [batch1]) - - self.assertEqual(list(client._batches), []) - - mutated_entity1 = _mutated_pb(self, batch1.mutations, "upsert") - self.assertEqual(mutated_entity1.key, key1._key) - - mutated_entity2 = _mutated_pb(self, batch2.mutations, "upsert") - self.assertEqual(mutated_entity2.key, key2._key) - - commit_method = client._datastore_api.commit - self.assertEqual(commit_method.call_count, 2) - mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": batch1.mutations, - "transaction": None, - } - ) - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": batch2.mutations, - "transaction": None, - } - ) - - def test_as_context_mgr_w_error(self): - project = "PROJECT" - properties = {"foo": "bar"} - entity = _Entity(properties) - key = entity.key = _Key(project) - - client = _Client(project) - self.assertEqual(list(client._batches), []) - - try: - with self._make_one(client) as batch: - self.assertEqual(list(client._batches), [batch]) - 
batch.put(entity) - raise ValueError("testing") - except ValueError: - pass + raise ValueError("testing") - self.assertEqual(list(client._batches), []) + except ValueError: + pass - mutated_entity = _mutated_pb(self, batch.mutations, "upsert") - self.assertEqual(mutated_entity.key, key._key) + assert list(client._batches) == [] - def test_as_context_mgr_enter_fails(self): - klass = self._get_target_class() + mutated_entity = _mutated_pb(batch.mutations, "upsert") + assert mutated_entity.key == key._key - class FailedBegin(klass): - def begin(self): - raise RuntimeError + client._datastore_api.commit.assert_not_called() - client = _Client(None, None) - self.assertEqual(client._batches, []) - batch = FailedBegin(client) - with self.assertRaises(RuntimeError): - # The context manager will never be entered because - # of the failure. - with batch: # pragma: NO COVER - pass - # Make sure no batch was added. - self.assertEqual(client._batches, []) +def test_batch_as_context_mgr_w_enter_fails(): + from google.cloud.datastore.batch import Batch + class FailedBegin(Batch): + def begin(self): + raise RuntimeError -class Test__parse_commit_response(unittest.TestCase): - def _call_fut(self, commit_response_pb): - from google.cloud.datastore.batch import _parse_commit_response + client = _Client(None, None) + assert list(client._batches) == [] - return _parse_commit_response(commit_response_pb) + batch = FailedBegin(client) - def test_it(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 + with pytest.raises(RuntimeError): + # The context manager will never be entered because + # of the failure. + with batch: # pragma: NO COVER + pass + + # Make sure no batch was added. + assert list(client._batches) == [] - index_updates = 1337 - keys = [ - entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Foo", id=1234)]), - entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Bar", name="baz")]), - ] - response = datastore_pb2.CommitResponse( - mutation_results=[datastore_pb2.MutationResult(key=key) for key in keys], - index_updates=index_updates, - ) - result = self._call_fut(response) - self.assertEqual(result, (index_updates, [i._pb for i in keys])) + +def test__parse_commit_response(): + from google.cloud.datastore.batch import _parse_commit_response + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + + index_updates = 1337 + keys = [ + entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Foo", id=1234)]), + entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Bar", name="baz")]), + ] + response = datastore_pb2.CommitResponse( + mutation_results=[datastore_pb2.MutationResult(key=key) for key in keys], + index_updates=index_updates, + ) + + result = _parse_commit_response(response) + + assert result == (index_updates, [i._pb for i in keys]) class _Entity(dict): @@ -539,18 +557,14 @@ def current_batch(self): return self._batches[0] -def _assert_num_mutations(test_case, mutation_pb_list, num_mutations): - test_case.assertEqual(len(mutation_pb_list), num_mutations) - - -def _mutated_pb(test_case, mutation_pb_list, mutation_type): +def _mutated_pb(mutation_pb_list, mutation_type): # Make sure there is only one mutation. - _assert_num_mutations(test_case, mutation_pb_list, 1) + assert len(mutation_pb_list) == 1 # We grab the only mutation. mutated_pb = mutation_pb_list[0] # Then check if it is the correct type. 
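+    # ("operation" is the oneof on Mutation; it holds exactly one of
+    # insert, update, upsert, or delete.)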
- test_case.assertEqual(mutated_pb._pb.WhichOneof("operation"), mutation_type) + assert mutated_pb._pb.WhichOneof("operation") == mutation_type return getattr(mutated_pb, mutation_type) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index f4c27cf421fd..7f38a5ad61e7 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -12,1483 +12,1477 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest - import mock +import pytest +PROJECT = "dummy-project-123" -def _make_credentials(): - import google.auth.credentials - return mock.Mock(spec=google.auth.credentials.Credentials) +def test__get_gcd_project_wo_value_set(): + from google.cloud.datastore.client import _get_gcd_project + environ = {} -def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import _new_value_pb + with mock.patch("os.getenv", new=environ.get): + project = _get_gcd_project() + assert project is None - entity_pb = entity_pb2.Entity() - entity_pb.key.partition_id.project_id = project - path_element = entity_pb._pb.key.path.add() - path_element.kind = kind - path_element.id = integer_id - if name is not None and str_val is not None: - value_pb = _new_value_pb(entity_pb, name) - value_pb.string_value = str_val - return entity_pb +def test__get_gcd_project_w_value_set(): + from google.cloud.datastore.client import _get_gcd_project + from google.cloud.datastore.client import DATASTORE_DATASET + environ = {DATASTORE_DATASET: PROJECT} -class Test__get_gcd_project(unittest.TestCase): - def _call_fut(self): - from google.cloud.datastore.client import _get_gcd_project + with mock.patch("os.getenv", new=environ.get): + project = _get_gcd_project() + assert project == PROJECT - return _get_gcd_project() - def test_no_value(self): - environ = {} - with mock.patch("os.getenv", new=environ.get): - project = self._call_fut() - self.assertIsNone(project) +def _determine_default_helper(gcd=None, fallback=None, project_called=None): + from google.cloud.datastore.client import _determine_default_project - def test_value_set(self): - from google.cloud.datastore.client import DATASTORE_DATASET + _callers = [] - MOCK_PROJECT = object() - environ = {DATASTORE_DATASET: MOCK_PROJECT} - with mock.patch("os.getenv", new=environ.get): - project = self._call_fut() - self.assertEqual(project, MOCK_PROJECT) + def gcd_mock(): + _callers.append("gcd_mock") + return gcd + def fallback_mock(project=None): + _callers.append(("fallback_mock", project)) + return fallback -class Test__determine_default_project(unittest.TestCase): - def _call_fut(self, project=None): - from google.cloud.datastore.client import _determine_default_project + patch = mock.patch.multiple( + "google.cloud.datastore.client", + _get_gcd_project=gcd_mock, + _base_default_project=fallback_mock, + ) + with patch: + returned_project = _determine_default_project(project_called) - return _determine_default_project(project=project) + return returned_project, _callers - def _determine_default_helper(self, gcd=None, fallback=None, project_called=None): - _callers = [] - def gcd_mock(): - _callers.append("gcd_mock") - return gcd +def test__determine_default_project_wo_value(): + project, callers = _determine_default_helper() + assert project is None + 
assert callers == ["gcd_mock", ("fallback_mock", None)] - def fallback_mock(project=None): - _callers.append(("fallback_mock", project)) - return fallback - patch = mock.patch.multiple( - "google.cloud.datastore.client", - _get_gcd_project=gcd_mock, - _base_default_project=fallback_mock, - ) - with patch: - returned_project = self._call_fut(project_called) - - return returned_project, _callers - - def test_no_value(self): - project, callers = self._determine_default_helper() - self.assertIsNone(project) - self.assertEqual(callers, ["gcd_mock", ("fallback_mock", None)]) - - def test_explicit(self): - PROJECT = object() - project, callers = self._determine_default_helper(project_called=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, []) - - def test_gcd(self): - PROJECT = object() - project, callers = self._determine_default_helper(gcd=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, ["gcd_mock"]) - - def test_fallback(self): - PROJECT = object() - project, callers = self._determine_default_helper(fallback=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, ["gcd_mock", ("fallback_mock", None)]) - - -class TestClient(unittest.TestCase): - - PROJECT = "PROJECT" - - @staticmethod - def _get_target_class(): - from google.cloud.datastore.client import Client - - return Client - - def _make_one( - self, - project=PROJECT, - namespace=None, - credentials=None, - client_info=None, - client_options=None, - _http=None, - _use_grpc=None, - ): - return self._get_target_class()( - project=project, - namespace=namespace, - credentials=credentials, - client_info=client_info, - client_options=client_options, - _http=_http, - _use_grpc=_use_grpc, - ) +def test__determine_default_project_w_explicit(): + project, callers = _determine_default_helper(project_called=PROJECT) + assert project == PROJECT + assert callers == [] - def test_constructor_w_project_no_environ(self): - # Some environments (e.g. AppVeyor CI) run in GCE, so - # this test would fail artificially. 
- patch = mock.patch( - "google.cloud.datastore.client._base_default_project", return_value=None - ) - with patch: - self.assertRaises(EnvironmentError, self._make_one, None) - def test_constructor_w_implicit_inputs(self): - from google.cloud.datastore.client import _CLIENT_INFO - from google.cloud.datastore.client import _DATASTORE_BASE_URL +def test__determine_default_project_w_gcd(): + project, callers = _determine_default_helper(gcd=PROJECT) + assert project == PROJECT + assert callers == ["gcd_mock"] - klass = self._get_target_class() - other = "other" - creds = _make_credentials() - klass = self._get_target_class() - patch1 = mock.patch( - "google.cloud.datastore.client._determine_default_project", - return_value=other, - ) - patch2 = mock.patch("google.auth.default", return_value=(creds, None)) - - with patch1 as _determine_default_project: - with patch2 as default: - client = klass() - - self.assertEqual(client.project, other) - self.assertIsNone(client.namespace) - self.assertIs(client._credentials, creds) - self.assertIs(client._client_info, _CLIENT_INFO) - self.assertIsNone(client._http_internal) - self.assertIsNone(client._client_options) - self.assertEqual(client.base_url, _DATASTORE_BASE_URL) - - self.assertIsNone(client.current_batch) - self.assertIsNone(client.current_transaction) - - default.assert_called_once_with(scopes=klass.SCOPE,) - _determine_default_project.assert_called_once_with(None) - - def test_constructor_w_explicit_inputs(self): - from google.api_core.client_options import ClientOptions - - other = "other" - namespace = "namespace" - creds = _make_credentials() - client_info = mock.Mock() - client_options = ClientOptions("endpoint") - http = object() - client = self._make_one( - project=other, - namespace=namespace, - credentials=creds, - client_info=client_info, - client_options=client_options, - _http=http, - ) - self.assertEqual(client.project, other) - self.assertEqual(client.namespace, namespace) - self.assertIs(client._credentials, creds) - self.assertIs(client._client_info, client_info) - self.assertIs(client._http_internal, http) - self.assertIsNone(client.current_batch) - self.assertIs(client._base_url, "endpoint") - self.assertEqual(list(client._batch_stack), []) - - def test_constructor_use_grpc_default(self): - import google.cloud.datastore.client as MUT - - project = "PROJECT" - creds = _make_credentials() - http = object() - - with mock.patch.object(MUT, "_USE_GRPC", new=True): - client1 = self._make_one(project=project, credentials=creds, _http=http) - self.assertTrue(client1._use_grpc) - # Explicitly over-ride the environment. - client2 = self._make_one( - project=project, credentials=creds, _http=http, _use_grpc=False - ) - self.assertFalse(client2._use_grpc) - - with mock.patch.object(MUT, "_USE_GRPC", new=False): - client3 = self._make_one(project=project, credentials=creds, _http=http) - self.assertFalse(client3._use_grpc) - # Explicitly over-ride the environment. 
- client4 = self._make_one( - project=project, credentials=creds, _http=http, _use_grpc=True - ) - self.assertTrue(client4._use_grpc) - - def test_constructor_w_emulator_w_creds(self): - from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST - - host = "localhost:1234" - fake_environ = {DATASTORE_EMULATOR_HOST: host} - project = "PROJECT" - creds = _make_credentials() - http = object() - - with mock.patch("os.environ", new=fake_environ): - with self.assertRaises(ValueError): - self._make_one(project=project, credentials=creds, _http=http) - - def test_constructor_w_emulator_wo_creds(self): - from google.auth.credentials import AnonymousCredentials - from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST - - host = "localhost:1234" - fake_environ = {DATASTORE_EMULATOR_HOST: host} - project = "PROJECT" - http = object() - - with mock.patch("os.environ", new=fake_environ): - client = self._make_one(project=project, _http=http) - - self.assertEqual(client.base_url, "http://" + host) - self.assertIsInstance(client._credentials, AnonymousCredentials) - - def test_base_url_property(self): - from google.cloud.datastore.client import _DATASTORE_BASE_URL - from google.api_core.client_options import ClientOptions - - alternate_url = "https://alias.example.com/" - project = "PROJECT" - creds = _make_credentials() - http = object() - client_options = ClientOptions() - - client = self._make_one( - project=project, - credentials=creds, - _http=http, - client_options=client_options, - ) - self.assertEqual(client.base_url, _DATASTORE_BASE_URL) - client.base_url = alternate_url - self.assertEqual(client.base_url, alternate_url) - - def test_base_url_property_w_client_options(self): - alternate_url = "https://alias.example.com/" - project = "PROJECT" - creds = _make_credentials() - http = object() - client_options = {"api_endpoint": "endpoint"} - - client = self._make_one( - project=project, - credentials=creds, - _http=http, - client_options=client_options, - ) - self.assertEqual(client.base_url, "endpoint") - client.base_url = alternate_url - self.assertEqual(client.base_url, alternate_url) +def test__determine_default_project_w_fallback(): + project, callers = _determine_default_helper(fallback=PROJECT) + assert project == PROJECT + assert callers == ["gcd_mock", ("fallback_mock", None)] - def test__datastore_api_property_already_set(self): - client = self._make_one( - project="prahj-ekt", credentials=_make_credentials(), _use_grpc=True - ) - already = client._datastore_api_internal = object() - self.assertIs(client._datastore_api, already) - - def test__datastore_api_property_gapic(self): - client_info = mock.Mock() - client = self._make_one( - project="prahj-ekt", - credentials=_make_credentials(), - client_info=client_info, - _http=object(), - _use_grpc=True, - ) - self.assertIsNone(client._datastore_api_internal) - patch = mock.patch( - "google.cloud.datastore.client.make_datastore_api", - return_value=mock.sentinel.ds_api, +def _make_client( + project=PROJECT, + namespace=None, + credentials=None, + client_info=None, + client_options=None, + _http=None, + _use_grpc=None, +): + from google.cloud.datastore.client import Client + + return Client( + project=project, + namespace=namespace, + credentials=credentials, + client_info=client_info, + client_options=client_options, + _http=_http, + _use_grpc=_use_grpc, + ) + + +def test_client_ctor_w_project_no_environ(): + # Some environments (e.g. AppVeyor CI) run in GCE, so + # this test would fail artificially. 
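+    # Patching _base_default_project to return None makes the failure
+    # deterministic, even on hosts where GCE metadata supplies a project.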
+ patch = mock.patch( + "google.cloud.datastore.client._base_default_project", return_value=None + ) + with patch: + with pytest.raises(EnvironmentError): + _make_client(project=None) + + +def test_client_ctor_w_implicit_inputs(): + from google.cloud.datastore.client import Client + from google.cloud.datastore.client import _CLIENT_INFO + from google.cloud.datastore.client import _DATASTORE_BASE_URL + + other = "other" + patch1 = mock.patch( + "google.cloud.datastore.client._determine_default_project", return_value=other, + ) + + creds = _make_credentials() + patch2 = mock.patch("google.auth.default", return_value=(creds, None)) + + with patch1 as _determine_default_project: + with patch2 as default: + client = Client() + + assert client.project == other + assert client.namespace is None + assert client._credentials is creds + assert client._client_info is _CLIENT_INFO + assert client._http_internal is None + assert client._client_options is None + assert client.base_url == _DATASTORE_BASE_URL + + assert client.current_batch is None + assert client.current_transaction is None + + default.assert_called_once_with(scopes=Client.SCOPE,) + _determine_default_project.assert_called_once_with(None) + + +def test_client_ctor_w_explicit_inputs(): + from google.api_core.client_options import ClientOptions + + other = "other" + namespace = "namespace" + creds = _make_credentials() + client_info = mock.Mock() + client_options = ClientOptions("endpoint") + http = object() + client = _make_client( + project=other, + namespace=namespace, + credentials=creds, + client_info=client_info, + client_options=client_options, + _http=http, + ) + assert client.project == other + assert client.namespace == namespace + assert client._credentials is creds + assert client._client_info is client_info + assert client._http_internal is http + assert client.current_batch is None + assert client._base_url == "endpoint" + assert list(client._batch_stack) == [] + + +def test_client_ctor_use_grpc_default(): + import google.cloud.datastore.client as MUT + + project = "PROJECT" + creds = _make_credentials() + http = object() + + with mock.patch.object(MUT, "_USE_GRPC", new=True): + client1 = _make_client(project=PROJECT, credentials=creds, _http=http) + assert client1._use_grpc + # Explicitly over-ride the environment. + client2 = _make_client( + project=project, credentials=creds, _http=http, _use_grpc=False ) - with patch as make_api: - ds_api = client._datastore_api - - self.assertIs(ds_api, mock.sentinel.ds_api) - self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api) - make_api.assert_called_once_with(client) - - def test__datastore_api_property_http(self): - client_info = mock.Mock() - client = self._make_one( - project="prahj-ekt", - credentials=_make_credentials(), - client_info=client_info, - _http=object(), - _use_grpc=False, + assert not client2._use_grpc + + with mock.patch.object(MUT, "_USE_GRPC", new=False): + client3 = _make_client(project=PROJECT, credentials=creds, _http=http) + assert not client3._use_grpc + # Explicitly over-ride the environment. 
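+        # The explicit _use_grpc argument takes precedence over the
+        # module-level _USE_GRPC flag in either direction.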
+ client4 = _make_client( + project=project, credentials=creds, _http=http, _use_grpc=True ) + assert client4._use_grpc - self.assertIsNone(client._datastore_api_internal) - patch = mock.patch( - "google.cloud.datastore.client.HTTPDatastoreAPI", - return_value=mock.sentinel.ds_api, - ) - with patch as make_api: - ds_api = client._datastore_api - self.assertIs(ds_api, mock.sentinel.ds_api) - self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api) - make_api.assert_called_once_with(client) +def test_client_ctor_w_emulator_w_creds(): + from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST - def test__push_batch_and__pop_batch(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - batch = client.batch() - xact = client.transaction() - client._push_batch(batch) - self.assertEqual(list(client._batch_stack), [batch]) - self.assertIs(client.current_batch, batch) - self.assertIsNone(client.current_transaction) - client._push_batch(xact) - self.assertIs(client.current_batch, xact) - self.assertIs(client.current_transaction, xact) - # list(_LocalStack) returns in reverse order. - self.assertEqual(list(client._batch_stack), [xact, batch]) - self.assertIs(client._pop_batch(), xact) - self.assertEqual(list(client._batch_stack), [batch]) - self.assertIs(client._pop_batch(), batch) - self.assertEqual(list(client._batch_stack), []) - - def test_get_miss(self): - - creds = _make_credentials() - client = self._make_one(credentials=creds) - get_multi = client.get_multi = mock.Mock(return_value=[]) - - key = object() - - self.assertIsNone(client.get(key)) - - get_multi.assert_called_once_with( - keys=[key], - missing=None, - deferred=None, - transaction=None, - eventual=False, - retry=None, - timeout=None, - ) + host = "localhost:1234" + fake_environ = {DATASTORE_EMULATOR_HOST: host} + project = "PROJECT" + creds = _make_credentials() + http = object() - def test_get_hit(self): - TXN_ID = "123" - _called_with = [] - _entity = object() - - def _get_multi(*args, **kw): - _called_with.append((args, kw)) - return [_entity] - - creds = _make_credentials() - client = self._make_one(credentials=creds) - client.get_multi = _get_multi - - key, missing, deferred = object(), [], [] - - self.assertIs(client.get(key, missing, deferred, TXN_ID), _entity) - - self.assertEqual(_called_with[0][0], ()) - self.assertEqual(_called_with[0][1]["keys"], [key]) - self.assertIs(_called_with[0][1]["missing"], missing) - self.assertIs(_called_with[0][1]["deferred"], deferred) - self.assertEqual(_called_with[0][1]["transaction"], TXN_ID) - - def test_get_multi_no_keys(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - results = client.get_multi([]) - self.assertEqual(results, []) - - def test_get_multi_miss(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.key import Key - - creds = _make_credentials() - client = self._make_one(credentials=creds) - ds_api = _make_datastore_api() - client._datastore_api_internal = ds_api - - key = Key("Kind", 1234, project=self.PROJECT) - results = client.get_multi([key]) - self.assertEqual(results, []) - - read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={ - "project_id": self.PROJECT, - "keys": [key.to_protobuf()], - "read_options": read_options, - } - ) + with mock.patch("os.environ", new=fake_environ): + with pytest.raises(ValueError): + _make_client(project=project, credentials=creds, _http=http) - def 
test_get_multi_miss_w_missing(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.key import Key - - KIND = "Kind" - ID = 1234 - - # Make a missing entity pb to be returned from mock backend. - missed = entity_pb2.Entity() - missed.key.partition_id.project_id = self.PROJECT - path_element = missed._pb.key.path.add() - path_element.kind = KIND - path_element.id = ID - - creds = _make_credentials() - client = self._make_one(credentials=creds) - # Set missing entity on mock connection. - lookup_response = _make_lookup_response(missing=[missed._pb]) - ds_api = _make_datastore_api(lookup_response=lookup_response) - client._datastore_api_internal = ds_api - - key = Key(KIND, ID, project=self.PROJECT) - missing = [] - entities = client.get_multi([key], missing=missing) - self.assertEqual(entities, []) - key_pb = key.to_protobuf() - self.assertEqual([missed.key.to_protobuf() for missed in missing], [key_pb._pb]) - - read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={ - "project_id": self.PROJECT, - "keys": [key_pb], - "read_options": read_options, - } - ) - def test_get_multi_w_missing_non_empty(self): - from google.cloud.datastore.key import Key +def test_client_ctor_w_emulator_wo_creds(): + from google.auth.credentials import AnonymousCredentials + from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST - creds = _make_credentials() - client = self._make_one(credentials=creds) - key = Key("Kind", 1234, project=self.PROJECT) + host = "localhost:1234" + fake_environ = {DATASTORE_EMULATOR_HOST: host} + project = "PROJECT" + http = object() - missing = ["this", "list", "is", "not", "empty"] - self.assertRaises(ValueError, client.get_multi, [key], missing=missing) + with mock.patch("os.environ", new=fake_environ): + client = _make_client(project=project, _http=http) - def test_get_multi_w_deferred_non_empty(self): - from google.cloud.datastore.key import Key + assert client.base_url == "http://" + host + assert isinstance(client._credentials, AnonymousCredentials) - creds = _make_credentials() - client = self._make_one(credentials=creds) - key = Key("Kind", 1234, project=self.PROJECT) - deferred = ["this", "list", "is", "not", "empty"] - self.assertRaises(ValueError, client.get_multi, [key], deferred=deferred) +def test_client_base_url_property(): + from google.api_core.client_options import ClientOptions + from google.cloud.datastore.client import _DATASTORE_BASE_URL - def test_get_multi_miss_w_deferred(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.key import Key + alternate_url = "https://alias.example.com/" + creds = _make_credentials() + client_options = ClientOptions() - key = Key("Kind", 1234, project=self.PROJECT) - key_pb = key.to_protobuf() + client = _make_client(credentials=creds, client_options=client_options) + assert client.base_url == _DATASTORE_BASE_URL - # Set deferred entity on mock connection. 
- creds = _make_credentials() - client = self._make_one(credentials=creds) - lookup_response = _make_lookup_response(deferred=[key_pb]) - ds_api = _make_datastore_api(lookup_response=lookup_response) - client._datastore_api_internal = ds_api + client.base_url = alternate_url + assert client.base_url == alternate_url - deferred = [] - entities = client.get_multi([key], deferred=deferred) - self.assertEqual(entities, []) - self.assertEqual([def_key.to_protobuf() for def_key in deferred], [key_pb]) - read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={ - "project_id": self.PROJECT, - "keys": [key_pb], - "read_options": read_options, - } - ) +def test_client_base_url_property_w_client_options(): + alternate_url = "https://alias.example.com/" + creds = _make_credentials() + client_options = {"api_endpoint": "endpoint"} - def test_get_multi_w_deferred_from_backend_but_not_passed(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.key import Key - - key1 = Key("Kind", project=self.PROJECT) - key1_pb = key1.to_protobuf() - key2 = Key("Kind", 2345, project=self.PROJECT) - key2_pb = key2.to_protobuf() - - entity1_pb = entity_pb2.Entity() - entity1_pb._pb.key.CopyFrom(key1_pb._pb) - entity2_pb = entity_pb2.Entity() - entity2_pb._pb.key.CopyFrom(key2_pb._pb) - - creds = _make_credentials() - client = self._make_one(credentials=creds) - # Mock up two separate requests. Using an iterable as side_effect - # allows multiple return values. - lookup_response1 = _make_lookup_response( - results=[entity1_pb], deferred=[key2_pb] - ) - lookup_response2 = _make_lookup_response(results=[entity2_pb]) - ds_api = _make_datastore_api() - ds_api.lookup = mock.Mock( - side_effect=[lookup_response1, lookup_response2], spec=[] - ) - client._datastore_api_internal = ds_api - - missing = [] - found = client.get_multi([key1, key2], missing=missing) - self.assertEqual(len(found), 2) - self.assertEqual(len(missing), 0) - - # Check the actual contents on the response. - self.assertIsInstance(found[0], Entity) - self.assertEqual(found[0].key.path, key1.path) - self.assertEqual(found[0].key.project, key1.project) - - self.assertIsInstance(found[1], Entity) - self.assertEqual(found[1].key.path, key2.path) - self.assertEqual(found[1].key.project, key2.project) - - self.assertEqual(ds_api.lookup.call_count, 2) - read_options = datastore_pb2.ReadOptions() - - ds_api.lookup.assert_any_call( - request={ - "project_id": self.PROJECT, - "keys": [key2_pb], - "read_options": read_options, - }, - ) + client = _make_client(credentials=creds, client_options=client_options,) + assert client.base_url == "endpoint" - ds_api.lookup.assert_any_call( - request={ - "project_id": self.PROJECT, - "keys": [key1_pb, key2_pb], - "read_options": read_options, - }, - ) + client.base_url = alternate_url + assert client.base_url == alternate_url - def test_get_multi_hit_w_retry_w_timeout(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.key import Key - - kind = "Kind" - id_ = 1234 - path = [{"kind": kind, "id": id_}] - retry = mock.Mock() - timeout = 100000 - - # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") - - # Make a connection to return the entity pb. 
- creds = _make_credentials() - client = self._make_one(credentials=creds) - lookup_response = _make_lookup_response(results=[entity_pb]) - ds_api = _make_datastore_api(lookup_response=lookup_response) - client._datastore_api_internal = ds_api - - key = Key(kind, id_, project=self.PROJECT) - (result,) = client.get_multi([key], retry=retry, timeout=timeout) - new_key = result.key - - # Check the returned value is as expected. - self.assertIsNot(new_key, key) - self.assertEqual(new_key.project, self.PROJECT) - self.assertEqual(new_key.path, path) - self.assertEqual(list(result), ["foo"]) - self.assertEqual(result["foo"], "Foo") - - read_options = datastore_pb2.ReadOptions() - - ds_api.lookup.assert_called_once_with( - request={ - "project_id": self.PROJECT, - "keys": [key.to_protobuf()], - "read_options": read_options, - }, - retry=retry, - timeout=timeout, - ) - def test_get_multi_hit_w_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.key import Key - - txn_id = b"123" - kind = "Kind" - id_ = 1234 - path = [{"kind": kind, "id": id_}] - - # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") - - # Make a connection to return the entity pb. - creds = _make_credentials() - client = self._make_one(credentials=creds) - lookup_response = _make_lookup_response(results=[entity_pb]) - ds_api = _make_datastore_api(lookup_response=lookup_response) - client._datastore_api_internal = ds_api - - key = Key(kind, id_, project=self.PROJECT) - txn = client.transaction() - txn._id = txn_id - (result,) = client.get_multi([key], transaction=txn) - new_key = result.key - - # Check the returned value is as expected. - self.assertIsNot(new_key, key) - self.assertEqual(new_key.project, self.PROJECT) - self.assertEqual(new_key.path, path) - self.assertEqual(list(result), ["foo"]) - self.assertEqual(result["foo"], "Foo") - - read_options = datastore_pb2.ReadOptions(transaction=txn_id) - ds_api.lookup.assert_called_once_with( - request={ - "project_id": self.PROJECT, - "keys": [key.to_protobuf()], - "read_options": read_options, - } - ) +def test_client__datastore_api_property_already_set(): + client = _make_client(credentials=_make_credentials(), _use_grpc=True) + already = client._datastore_api_internal = object() + assert client._datastore_api is already - def test_get_multi_hit_multiple_keys_same_project(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore.key import Key - - kind = "Kind" - id1 = 1234 - id2 = 2345 - - # Make a found entity pb to be returned from mock backend. - entity_pb1 = _make_entity_pb(self.PROJECT, kind, id1) - entity_pb2 = _make_entity_pb(self.PROJECT, kind, id2) - - # Make a connection to return the entity pbs. - creds = _make_credentials() - client = self._make_one(credentials=creds) - lookup_response = _make_lookup_response(results=[entity_pb1, entity_pb2]) - ds_api = _make_datastore_api(lookup_response=lookup_response) - client._datastore_api_internal = ds_api - - key1 = Key(kind, id1, project=self.PROJECT) - key2 = Key(kind, id2, project=self.PROJECT) - retrieved1, retrieved2 = client.get_multi([key1, key2]) - - # Check values match. 
- self.assertEqual(retrieved1.key.path, key1.path) - self.assertEqual(dict(retrieved1), {}) - self.assertEqual(retrieved2.key.path, key2.path) - self.assertEqual(dict(retrieved2), {}) - - read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={ - "project_id": self.PROJECT, - "keys": [key1.to_protobuf(), key2.to_protobuf()], - "read_options": read_options, - } - ) - def test_get_multi_hit_multiple_keys_different_project(self): - from google.cloud.datastore.key import Key +def test_client__datastore_api_property_gapic(): + client_info = mock.Mock() + client = _make_client( + project="prahj-ekt", + credentials=_make_credentials(), + client_info=client_info, + _http=object(), + _use_grpc=True, + ) - PROJECT1 = "PROJECT" - PROJECT2 = "PROJECT-ALT" + assert client._datastore_api_internal is None + patch = mock.patch( + "google.cloud.datastore.client.make_datastore_api", + return_value=mock.sentinel.ds_api, + ) + with patch as make_api: + ds_api = client._datastore_api + + assert ds_api is mock.sentinel.ds_api + assert client._datastore_api_internal is mock.sentinel.ds_api + make_api.assert_called_once_with(client) + + +def test__datastore_api_property_http(): + client_info = mock.Mock() + client = _make_client( + project="prahj-ekt", + credentials=_make_credentials(), + client_info=client_info, + _http=object(), + _use_grpc=False, + ) - # Make sure our IDs are actually different. - self.assertNotEqual(PROJECT1, PROJECT2) + assert client._datastore_api_internal is None + patch = mock.patch( + "google.cloud.datastore.client.HTTPDatastoreAPI", + return_value=mock.sentinel.ds_api, + ) + with patch as make_api: + ds_api = client._datastore_api - key1 = Key("KIND", 1234, project=PROJECT1) - key2 = Key("KIND", 1234, project=PROJECT2) + assert ds_api is mock.sentinel.ds_api + assert client._datastore_api_internal is mock.sentinel.ds_api + make_api.assert_called_once_with(client) - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - client.get_multi([key1, key2]) +def test_client__push_batch_and__pop_batch(): + creds = _make_credentials() + client = _make_client(credentials=creds) + batch = client.batch() + xact = client.transaction() - def test_get_multi_max_loops(self): - from google.cloud.datastore.key import Key + client._push_batch(batch) + assert list(client._batch_stack) == [batch] + assert client.current_batch is batch + assert client.current_transaction is None - kind = "Kind" - id_ = 1234 + client._push_batch(xact) + assert client.current_batch is xact + assert client.current_transaction is xact + # list(_LocalStack) returns in reverse order. + assert list(client._batch_stack) == [xact, batch] - # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo") + assert client._pop_batch() is xact + assert list(client._batch_stack) == [batch] + assert client.current_batch is batch + assert client.current_transaction is None - # Make a connection to return the entity pb. 
- creds = _make_credentials() - client = self._make_one(credentials=creds) - lookup_response = _make_lookup_response(results=[entity_pb]) - ds_api = _make_datastore_api(lookup_response=lookup_response) - client._datastore_api_internal = ds_api + assert client._pop_batch() is batch + assert list(client._batch_stack) == [] - key = Key(kind, id_, project=self.PROJECT) - deferred = [] - missing = [] - patch = mock.patch("google.cloud.datastore.client._MAX_LOOPS", new=-1) - with patch: - result = client.get_multi([key], missing=missing, deferred=deferred) +def test_client_get_miss(): - # Make sure we have no results, even though the connection has been - # set up as in `test_hit` to return a single result. - self.assertEqual(result, []) - self.assertEqual(missing, []) - self.assertEqual(deferred, []) - ds_api.lookup.assert_not_called() + creds = _make_credentials() + client = _make_client(credentials=creds) + get_multi = client.get_multi = mock.Mock(return_value=[]) - def test_put(self): + key = object() - creds = _make_credentials() - client = self._make_one(credentials=creds) - put_multi = client.put_multi = mock.Mock() - entity = mock.Mock() + assert client.get(key) is None - client.put(entity) + get_multi.assert_called_once_with( + keys=[key], + missing=None, + deferred=None, + transaction=None, + eventual=False, + retry=None, + timeout=None, + ) - put_multi.assert_called_once_with(entities=[entity], retry=None, timeout=None) - def test_put_w_retry_w_timeout(self): +def test_client_get_hit(): + txn_id = "123" + _entity = object() + creds = _make_credentials() + client = _make_client(credentials=creds) + get_multi = client.get_multi = mock.Mock(return_value=[_entity]) - creds = _make_credentials() - client = self._make_one(credentials=creds) - put_multi = client.put_multi = mock.Mock() - entity = mock.Mock() - retry = mock.Mock() - timeout = 100000 + key, missing, deferred = object(), [], [] - client.put(entity, retry=retry, timeout=timeout) + assert client.get(key, missing, deferred, txn_id) is _entity - put_multi.assert_called_once_with( - entities=[entity], retry=retry, timeout=timeout - ) + get_multi.assert_called_once_with( + keys=[key], + missing=missing, + deferred=deferred, + transaction=txn_id, + eventual=False, + retry=None, + timeout=None, + ) - def test_put_multi_no_entities(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - self.assertIsNone(client.put_multi([])) - def test_put_multi_w_single_empty_entity(self): - # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649 - from google.cloud.datastore.entity import Entity +def test_client_get_multi_no_keys(): + creds = _make_credentials() + client = _make_client(credentials=creds) + ds_api = _make_datastore_api() + client._datastore_api_internal = ds_api - creds = _make_credentials() - client = self._make_one(credentials=creds) - self.assertRaises(ValueError, client.put_multi, Entity()) + results = client.get_multi([]) - def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 + assert results == [] - entity = _Entity(foo=u"bar") - key = entity.key = _Key(_Key.kind, None) - retry = mock.Mock() - timeout = 100000 + ds_api.lookup.assert_not_called() - creds = _make_credentials() - client = self._make_one(credentials=creds) - key_pb = _make_key(234) - ds_api = _make_datastore_api(key_pb) - client._datastore_api_internal = ds_api - result = client.put_multi([entity], retry=retry, timeout=timeout) - 
self.assertIsNone(result) +def test_client_get_multi_miss(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + creds = _make_credentials() + client = _make_client(credentials=creds) + ds_api = _make_datastore_api() + client._datastore_api_internal = ds_api + + key = Key("Kind", 1234, project=PROJECT) + results = client.get_multi([key]) + assert results == [] + + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + request={ + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": read_options, + } + ) - self.assertEqual(ds_api.commit.call_count, 1) - _, positional, keyword = ds_api.commit.mock_calls[0] - self.assertEqual(len(positional), 0) +def test_client_get_multi_miss_w_missing(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.key import Key + + KIND = "Kind" + ID = 1234 + + # Make a missing entity pb to be returned from mock backend. + missed = entity_pb2.Entity() + missed.key.partition_id.project_id = PROJECT + path_element = missed._pb.key.path.add() + path_element.kind = KIND + path_element.id = ID + + creds = _make_credentials() + client = _make_client(credentials=creds) + # Set missing entity on mock connection. + lookup_response = _make_lookup_response(missing=[missed._pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key = Key(KIND, ID, project=PROJECT) + missing = [] + entities = client.get_multi([key], missing=missing) + assert entities == [] + key_pb = key.to_protobuf() + assert [missed.key.to_protobuf() for missed in missing] == [key_pb._pb] + + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + request={"project_id": PROJECT, "keys": [key_pb], "read_options": read_options} + ) - self.assertEqual(len(keyword), 3) - self.assertEqual(keyword["retry"], retry) - self.assertEqual(keyword["timeout"], timeout) - self.assertEqual(len(keyword["request"]), 4) - self.assertEqual(keyword["request"]["project_id"], self.PROJECT) - self.assertEqual( - keyword["request"]["mode"], - datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, - ) - self.assertEqual(keyword["request"]["transaction"], None) - mutations = keyword["request"]["mutations"] - mutated_entity = _mutated_pb(self, mutations, "insert") - self.assertEqual(mutated_entity.key, key.to_protobuf()) - - prop_list = list(mutated_entity.properties.items()) - self.assertTrue(len(prop_list), 1) - name, value_pb = prop_list[0] - self.assertEqual(name, "foo") - self.assertEqual(value_pb.string_value, u"bar") - - def test_put_multi_existing_batch_w_completed_key(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - entity = _Entity(foo=u"bar") - key = entity.key = _Key() - - with _NoCommitBatch(client) as CURR_BATCH: - result = client.put_multi([entity]) - - self.assertIsNone(result) - mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, "upsert") - self.assertEqual(mutated_entity.key, key.to_protobuf()) - - prop_list = list(mutated_entity.properties.items()) - self.assertTrue(len(prop_list), 1) - name, value_pb = prop_list[0] - self.assertEqual(name, "foo") - self.assertEqual(value_pb.string_value, u"bar") - - def test_delete(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - delete_multi = client.delete_multi = mock.Mock() - key = 
mock.Mock() - - client.delete(key) - - delete_multi.assert_called_once_with(keys=[key], retry=None, timeout=None) - - def test_delete_w_retry_w_timeout(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - delete_multi = client.delete_multi = mock.Mock() - key = mock.Mock() - retry = mock.Mock() - timeout = 100000 - - client.delete(key, retry=retry, timeout=timeout) - - delete_multi.assert_called_once_with(keys=[key], retry=retry, timeout=timeout) - - def test_delete_multi_no_keys(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - client._datastore_api_internal = _make_datastore_api() - - result = client.delete_multi([]) - self.assertIsNone(result) - client._datastore_api_internal.commit.assert_not_called() - - def test_delete_multi_no_batch_w_retry_w_timeout(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - key = _Key() - retry = mock.Mock() - timeout = 100000 - - creds = _make_credentials() - client = self._make_one(credentials=creds) - ds_api = _make_datastore_api() - client._datastore_api_internal = ds_api - - result = client.delete_multi([key], retry=retry, timeout=timeout) - self.assertIsNone(result) - - self.assertEqual(ds_api.commit.call_count, 1) - _, positional, keyword = ds_api.commit.mock_calls[0] - - self.assertEqual(len(positional), 0) - - self.assertEqual(len(keyword), 3) - self.assertEqual(keyword["retry"], retry) - self.assertEqual(keyword["timeout"], timeout) - - self.assertEqual(len(keyword["request"]), 4) - self.assertEqual(keyword["request"]["project_id"], self.PROJECT) - self.assertEqual( - keyword["request"]["mode"], - datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, - ) - self.assertEqual(keyword["request"]["transaction"], None) - mutations = keyword["request"]["mutations"] - mutated_key = _mutated_pb(self, mutations, "delete") - self.assertEqual(mutated_key, key.to_protobuf()) +def test_client_get_multi_w_missing_non_empty(): + from google.cloud.datastore.key import Key - def test_delete_multi_w_existing_batch(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - client._datastore_api_internal = _make_datastore_api() + creds = _make_credentials() + client = _make_client(credentials=creds) + key = Key("Kind", 1234, project=PROJECT) - key = _Key() + missing = ["this", "list", "is", "not", "empty"] + with pytest.raises(ValueError): + client.get_multi([key], missing=missing) - with _NoCommitBatch(client) as CURR_BATCH: - result = client.delete_multi([key]) - self.assertIsNone(result) - mutated_key = _mutated_pb(self, CURR_BATCH.mutations, "delete") - self.assertEqual(mutated_key, key._key) - client._datastore_api_internal.commit.assert_not_called() +def test_client_get_multi_w_deferred_non_empty(): + from google.cloud.datastore.key import Key - def test_delete_multi_w_existing_transaction(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - client._datastore_api_internal = _make_datastore_api() + creds = _make_credentials() + client = _make_client(credentials=creds) + key = Key("Kind", 1234, project=PROJECT) - key = _Key() + deferred = ["this", "list", "is", "not", "empty"] + with pytest.raises(ValueError): + client.get_multi([key], deferred=deferred) - with _NoCommitTransaction(client) as CURR_XACT: - result = client.delete_multi([key]) - self.assertIsNone(result) - mutated_key = _mutated_pb(self, CURR_XACT.mutations, "delete") - self.assertEqual(mutated_key, key._key) - 
client._datastore_api_internal.commit.assert_not_called() +def test_client_get_multi_miss_w_deferred(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + key = Key("Kind", 1234, project=PROJECT) + key_pb = key.to_protobuf() + + # Set deferred entity on mock connection. + creds = _make_credentials() + client = _make_client(credentials=creds) + lookup_response = _make_lookup_response(deferred=[key_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + deferred = [] + entities = client.get_multi([key], deferred=deferred) + assert entities == [] + assert [def_key.to_protobuf() for def_key in deferred] == [key_pb] + + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + request={"project_id": PROJECT, "keys": [key_pb], "read_options": read_options} + ) - def test_delete_multi_w_existing_transaction_entity(self): - from google.cloud.datastore.entity import Entity - creds = _make_credentials() - client = self._make_one(credentials=creds) - client._datastore_api_internal = _make_datastore_api() +def test_client_get_multi_w_deferred_from_backend_but_not_passed(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.key import Key + + key1 = Key("Kind", project=PROJECT) + key1_pb = key1.to_protobuf() + key2 = Key("Kind", 2345, project=PROJECT) + key2_pb = key2.to_protobuf() + + entity1_pb = entity_pb2.Entity() + entity1_pb._pb.key.CopyFrom(key1_pb._pb) + entity2_pb = entity_pb2.Entity() + entity2_pb._pb.key.CopyFrom(key2_pb._pb) + + creds = _make_credentials() + client = _make_client(credentials=creds) + # Mock up two separate requests. Using an iterable as side_effect + # allows multiple return values. + lookup_response1 = _make_lookup_response(results=[entity1_pb], deferred=[key2_pb]) + lookup_response2 = _make_lookup_response(results=[entity2_pb]) + ds_api = _make_datastore_api() + ds_api.lookup = mock.Mock(side_effect=[lookup_response1, lookup_response2], spec=[]) + client._datastore_api_internal = ds_api + + missing = [] + found = client.get_multi([key1, key2], missing=missing) + assert len(found) == 2 + assert len(missing) == 0 + + # Check the actual contents on the response. 
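+    # Both keys surface as hits; the deferred key was re-fetched on the second lookup.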
+ assert isinstance(found[0], Entity) + assert found[0].key.path == key1.path + assert found[0].key.project == key1.project + + assert isinstance(found[1], Entity) + assert found[1].key.path == key2.path + assert found[1].key.project == key2.project + + assert ds_api.lookup.call_count == 2 + read_options = datastore_pb2.ReadOptions() + + ds_api.lookup.assert_any_call( + request={ + "project_id": PROJECT, + "keys": [key2_pb], + "read_options": read_options, + }, + ) - key = _Key() - entity = Entity(key=key) + ds_api.lookup.assert_any_call( + request={ + "project_id": PROJECT, + "keys": [key1_pb, key2_pb], + "read_options": read_options, + }, + ) - with _NoCommitTransaction(client) as CURR_XACT: - result = client.delete_multi([entity]) - self.assertIsNone(result) - mutated_key = _mutated_pb(self, CURR_XACT.mutations, "delete") - self.assertEqual(mutated_key, key._key) - client._datastore_api_internal.commit.assert_not_called() +def test_client_get_multi_hit_w_retry_w_timeout(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + kind = "Kind" + id_ = 1234 + path = [{"kind": kind, "id": id_}] + retry = mock.Mock() + timeout = 100000 + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + + # Make a connection to return the entity pb. + creds = _make_credentials() + client = _make_client(credentials=creds) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key = Key(kind, id_, project=PROJECT) + (result,) = client.get_multi([key], retry=retry, timeout=timeout) + new_key = result.key + + # Check the returned value is as expected. + assert new_key is not key + assert new_key.project == PROJECT + assert new_key.path == path + assert list(result) == ["foo"] + assert result["foo"] == "Foo" + + read_options = datastore_pb2.ReadOptions() + + ds_api.lookup.assert_called_once_with( + request={ + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": read_options, + }, + retry=retry, + timeout=timeout, + ) - def test_allocate_ids_w_partial_key(self): - num_ids = 2 - incomplete_key = _Key(_Key.kind, None) +def test_client_get_multi_hit_w_transaction(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + txn_id = b"123" + kind = "Kind" + id_ = 1234 + path = [{"kind": kind, "id": id_}] + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + + # Make a connection to return the entity pb. + creds = _make_credentials() + client = _make_client(credentials=creds) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key = Key(kind, id_, project=PROJECT) + txn = client.transaction() + txn._id = txn_id + (result,) = client.get_multi([key], transaction=txn) + new_key = result.key + + # Check the returned value is as expected. 
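+    # get_multi builds a fresh key from the response rather than reusing the input key.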
+ assert new_key is not key + assert new_key.project == PROJECT + assert new_key.path == path + assert list(result) == ["foo"] + assert result["foo"] == "Foo" + + read_options = datastore_pb2.ReadOptions(transaction=txn_id) + ds_api.lookup.assert_called_once_with( + request={ + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": read_options, + } + ) - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) - alloc_ids = mock.Mock(return_value=allocated, spec=[]) - ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) - client._datastore_api_internal = ds_api - result = client.allocate_ids(incomplete_key, num_ids) +def test_client_get_multi_hit_multiple_keys_same_project(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + kind = "Kind" + id1 = 1234 + id2 = 2345 + + # Make a found entity pb to be returned from mock backend. + entity_pb1 = _make_entity_pb(PROJECT, kind, id1) + entity_pb2 = _make_entity_pb(PROJECT, kind, id2) + + # Make a connection to return the entity pbs. + creds = _make_credentials() + client = _make_client(credentials=creds) + lookup_response = _make_lookup_response(results=[entity_pb1, entity_pb2]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key1 = Key(kind, id1, project=PROJECT) + key2 = Key(kind, id2, project=PROJECT) + retrieved1, retrieved2 = client.get_multi([key1, key2]) + + # Check values match. + assert retrieved1.key.path == key1.path + assert dict(retrieved1) == {} + assert retrieved2.key.path == key2.path + assert dict(retrieved2) == {} + + read_options = datastore_pb2.ReadOptions() + ds_api.lookup.assert_called_once_with( + request={ + "project_id": PROJECT, + "keys": [key1.to_protobuf(), key2.to_protobuf()], + "read_options": read_options, + } + ) - # Check the IDs returned. - self.assertEqual([key.id for key in result], list(range(num_ids))) - expected_keys = [incomplete_key.to_protobuf()] * num_ids - alloc_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys} - ) +def test_client_get_multi_hit_multiple_keys_different_project(): + from google.cloud.datastore.key import Key - def test_allocate_ids_w_partial_key_w_retry_w_timeout(self): - num_ids = 2 + PROJECT1 = "PROJECT" + PROJECT2 = "PROJECT-ALT" - incomplete_key = _Key(_Key.kind, None) - retry = mock.Mock() - timeout = 100000 + key1 = Key("KIND", 1234, project=PROJECT1) + key2 = Key("KIND", 1234, project=PROJECT2) - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) - alloc_ids = mock.Mock(return_value=allocated, spec=[]) - ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) - client._datastore_api_internal = ds_api + creds = _make_credentials() + client = _make_client(credentials=creds) - result = client.allocate_ids( - incomplete_key, num_ids, retry=retry, timeout=timeout - ) + with pytest.raises(ValueError): + client.get_multi([key1, key2]) - # Check the IDs returned. 
- self.assertEqual([key.id for key in result], list(range(num_ids))) - expected_keys = [incomplete_key.to_protobuf()] * num_ids - alloc_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys}, - retry=retry, - timeout=timeout, - ) +def test_client_get_multi_max_loops(): + from google.cloud.datastore.key import Key - def test_allocate_ids_w_completed_key(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) + kind = "Kind" + id_ = 1234 - complete_key = _Key() - self.assertRaises(ValueError, client.allocate_ids, complete_key, 2) + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") - def test_reserve_ids_sequential_w_completed_key(self): - num_ids = 2 - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key() - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - self.assertTrue(not complete_key.is_partial) + # Make a connection to return the entity pb. + creds = _make_credentials() + client = _make_client(credentials=creds) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api - client.reserve_ids_sequential(complete_key, num_ids) + key = Key(kind, id_, project=PROJECT) + deferred = [] + missing = [] - reserved_keys = ( - _Key(_Key.kind, id) - for id in range(complete_key.id, complete_key.id + num_ids) - ) - expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys} - ) + patch = mock.patch("google.cloud.datastore.client._MAX_LOOPS", new=-1) + with patch: + result = client.get_multi([key], missing=missing, deferred=deferred) - def test_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(self): - num_ids = 2 - retry = mock.Mock() - timeout = 100000 - - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key() - self.assertTrue(not complete_key.is_partial) - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - - client.reserve_ids_sequential( - complete_key, num_ids, retry=retry, timeout=timeout - ) + # Make sure we have no results, even though the connection has been + # set up as in `test_hit` to return a single result. 
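+    # (With _MAX_LOOPS patched to -1, the fetch loop body never executes.)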
+ assert result == [] + assert missing == [] + assert deferred == [] + ds_api.lookup.assert_not_called() - reserved_keys = ( - _Key(_Key.kind, id) - for id in range(complete_key.id, complete_key.id + num_ids) - ) - expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys}, - retry=retry, - timeout=timeout, - ) - def test_reserve_ids_sequential_w_completed_key_w_ancestor(self): - num_ids = 2 - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - self.assertTrue(not complete_key.is_partial) +def test_client_put(): + + creds = _make_credentials() + client = _make_client(credentials=creds) + put_multi = client.put_multi = mock.Mock() + entity = mock.Mock() + + client.put(entity) + + put_multi.assert_called_once_with(entities=[entity], retry=None, timeout=None) + + +def test_client_put_w_retry_w_timeout(): + + creds = _make_credentials() + client = _make_client(credentials=creds) + put_multi = client.put_multi = mock.Mock() + entity = mock.Mock() + retry = mock.Mock() + timeout = 100000 + + client.put(entity, retry=retry, timeout=timeout) + + put_multi.assert_called_once_with(entities=[entity], retry=retry, timeout=timeout) + + +def test_client_put_multi_no_entities(): + creds = _make_credentials() + client = _make_client(credentials=creds) + assert client.put_multi([]) is None + + +def test_client_put_multi_w_single_empty_entity(): + # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649 + from google.cloud.datastore.entity import Entity + + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): + client.put_multi(Entity()) + + +def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + entity = _Entity(foo=u"bar") + key = entity.key = _Key(_Key.kind, None) + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = _make_client(credentials=creds) + key_pb = _make_key(234) + ds_api = _make_datastore_api(key_pb) + client._datastore_api_internal = ds_api + + result = client.put_multi([entity], retry=retry, timeout=timeout) + assert result is None + + ds_api.commit.assert_called_once_with( + request={ + "project_id": PROJECT, + "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, + "mutations": mock.ANY, + "transaction": None, + }, + retry=retry, + timeout=timeout, + ) + + mutations = ds_api.commit.call_args[1]["request"]["mutations"] + mutated_entity = _mutated_pb(mutations, "insert") + assert mutated_entity.key == key.to_protobuf() + + prop_list = list(mutated_entity.properties.items()) + assert len(prop_list) == 1 + name, value_pb = prop_list[0] + assert name == "foo" + assert value_pb.string_value == u"bar" + + +def test_client_put_multi_existing_batch_w_completed_key(): + creds = _make_credentials() + client = _make_client(credentials=creds) + entity = _Entity(foo=u"bar") + key = entity.key = _Key() + + with _NoCommitBatch(client) as CURR_BATCH: + result = client.put_multi([entity]) + + assert result is None + mutated_entity = _mutated_pb(CURR_BATCH.mutations, "upsert") + assert mutated_entity.key == key.to_protobuf() + + prop_list = list(mutated_entity.properties.items()) 
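+    # Exactly one property ("foo") should have been staged on the mutation.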
+ assert len(prop_list) == 1 + name, value_pb = prop_list[0] + assert name == "foo" + assert value_pb.string_value == u"bar" + + +def test_client_delete(): + creds = _make_credentials() + client = _make_client(credentials=creds) + delete_multi = client.delete_multi = mock.Mock() + key = mock.Mock() + + client.delete(key) + + delete_multi.assert_called_once_with(keys=[key], retry=None, timeout=None) + + +def test_client_delete_w_retry_w_timeout(): + creds = _make_credentials() + client = _make_client(credentials=creds) + delete_multi = client.delete_multi = mock.Mock() + key = mock.Mock() + retry = mock.Mock() + timeout = 100000 + + client.delete(key, retry=retry, timeout=timeout) + + delete_multi.assert_called_once_with(keys=[key], retry=retry, timeout=timeout) + + +def test_client_delete_multi_no_keys(): + creds = _make_credentials() + client = _make_client(credentials=creds) + client._datastore_api_internal = _make_datastore_api() + + result = client.delete_multi([]) + assert result is None + client._datastore_api_internal.commit.assert_not_called() + + +def test_client_delete_multi_no_batch_w_retry_w_timeout(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + key = _Key() + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = _make_client(credentials=creds) + ds_api = _make_datastore_api() + client._datastore_api_internal = ds_api + + result = client.delete_multi([key], retry=retry, timeout=timeout) + assert result is None + + ds_api.commit.assert_called_once_with( + request={ + "project_id": PROJECT, + "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, + "mutations": mock.ANY, + "transaction": None, + }, + retry=retry, + timeout=timeout, + ) + + mutations = ds_api.commit.call_args[1]["request"]["mutations"] + mutated_key = _mutated_pb(mutations, "delete") + assert mutated_key == key.to_protobuf() + + +def test_client_delete_multi_w_existing_batch(): + creds = _make_credentials() + client = _make_client(credentials=creds) + client._datastore_api_internal = _make_datastore_api() + + key = _Key() + + with _NoCommitBatch(client) as CURR_BATCH: + result = client.delete_multi([key]) + + assert result is None + mutated_key = _mutated_pb(CURR_BATCH.mutations, "delete") + assert mutated_key == key._key + client._datastore_api_internal.commit.assert_not_called() + + +def test_client_delete_multi_w_existing_transaction(): + creds = _make_credentials() + client = _make_client(credentials=creds) + client._datastore_api_internal = _make_datastore_api() + + key = _Key() + + with _NoCommitTransaction(client) as CURR_XACT: + result = client.delete_multi([key]) + + assert result is None + mutated_key = _mutated_pb(CURR_XACT.mutations, "delete") + assert mutated_key == key._key + client._datastore_api_internal.commit.assert_not_called() + + +def test_client_delete_multi_w_existing_transaction_entity(): + from google.cloud.datastore.entity import Entity + + creds = _make_credentials() + client = _make_client(credentials=creds) + client._datastore_api_internal = _make_datastore_api() + + key = _Key() + entity = Entity(key=key) + + with _NoCommitTransaction(client) as CURR_XACT: + result = client.delete_multi([entity]) + + assert result is None + mutated_key = _mutated_pb(CURR_XACT.mutations, "delete") + assert mutated_key == key._key + client._datastore_api_internal.commit.assert_not_called() + + +def test_client_allocate_ids_w_completed_key(): + creds = _make_credentials() + client = _make_client(credentials=creds) + + complete_key = _Key() 
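+    # Allocating IDs is only valid for partial keys; a completed key is rejected.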
+ with pytest.raises(ValueError): + client.allocate_ids(complete_key, 2) + + +def test_client_allocate_ids_w_partial_key(): + num_ids = 2 + + incomplete_key = _Key(_Key.kind, None) + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) + alloc_ids = mock.Mock(return_value=allocated, spec=[]) + ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) + client._datastore_api_internal = ds_api + + result = client.allocate_ids(incomplete_key, num_ids) + + # Check the IDs returned. + assert [key.id for key in result] == list(range(num_ids)) + + expected_keys = [incomplete_key.to_protobuf()] * num_ids + alloc_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys} + ) + + +def test_client_allocate_ids_w_partial_key_w_retry_w_timeout(): + num_ids = 2 + + incomplete_key = _Key(_Key.kind, None) + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) + alloc_ids = mock.Mock(return_value=allocated, spec=[]) + ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) + client._datastore_api_internal = ds_api + + result = client.allocate_ids(incomplete_key, num_ids, retry=retry, timeout=timeout) + + # Check the IDs returned. + assert [key.id for key in result] == list(range(num_ids)) + + expected_keys = [incomplete_key.to_protobuf()] * num_ids + alloc_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys}, + retry=retry, + timeout=timeout, + ) + + +def test_client_reserve_ids_sequential_w_completed_key(): + num_ids = 2 + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + complete_key = _Key() + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + assert not complete_key.is_partial + + client.reserve_ids_sequential(complete_key, num_ids) + + reserved_keys = ( + _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys} + ) + + +def test_client_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(): + num_ids = 2 + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + complete_key = _Key() + assert not complete_key.is_partial + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + + client.reserve_ids_sequential(complete_key, num_ids, retry=retry, timeout=timeout) + + reserved_keys = ( + _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys}, + retry=retry, + timeout=timeout, + ) + + +def test_client_reserve_ids_sequential_w_completed_key_w_ancestor(): + num_ids = 2 + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) 
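+    # Install the stubbed API directly so no real backend is contacted.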
+ client._datastore_api_internal = ds_api + assert not complete_key.is_partial + + client.reserve_ids_sequential(complete_key, num_ids) + + reserved_keys = ( + _Key("PARENT", "SINGLETON", _Key.kind, id) + for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys} + ) + + +def test_client_reserve_ids_sequential_w_partial_key(): + num_ids = 2 + incomplete_key = _Key(_Key.kind, None) + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): + client.reserve_ids_sequential(incomplete_key, num_ids) + + +def test_client_reserve_ids_sequential_w_wrong_num_ids(): + num_ids = "2" + complete_key = _Key() + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): client.reserve_ids_sequential(complete_key, num_ids) - reserved_keys = ( - _Key("PARENT", "SINGLETON", _Key.kind, id) - for id in range(complete_key.id, complete_key.id + num_ids) - ) - expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys} - ) - def test_reserve_ids_sequential_w_partial_key(self): - num_ids = 2 - incomplete_key = _Key(_Key.kind, None) - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - client.reserve_ids_sequential(incomplete_key, num_ids) - - def test_reserve_ids_sequential_w_wrong_num_ids(self): - num_ids = "2" - complete_key = _Key() - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - client.reserve_ids_sequential(complete_key, num_ids) - - def test_reserve_ids_sequential_w_non_numeric_key_name(self): - num_ids = 2 - complete_key = _Key(_Key.kind, "batman") - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - client.reserve_ids_sequential(complete_key, num_ids) - - def test_reserve_ids_w_completed_key(self): - import warnings - - num_ids = 2 - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key() - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - self.assertTrue(not complete_key.is_partial) +def test_client_reserve_ids_sequential_w_non_numeric_key_name(): + num_ids = 2 + complete_key = _Key(_Key.kind, "batman") + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): + client.reserve_ids_sequential(complete_key, num_ids) + +def _assert_reserve_ids_warning(warned): + assert len(warned) == 1 + assert "Client.reserve_ids is deprecated." 
in str(warned[0].message) + + +def test_client_reserve_ids_w_partial_key(): + import warnings + + num_ids = 2 + incomplete_key = _Key(_Key.kind, None) + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(incomplete_key, num_ids) + + _assert_reserve_ids_warning(warned) + + +def test_client_reserve_ids_w_wrong_num_ids(): + import warnings + + num_ids = "2" + complete_key = _Key() + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) + + _assert_reserve_ids_warning(warned) + + +def test_client_reserve_ids_w_non_numeric_key_name(): + import warnings + + num_ids = 2 + complete_key = _Key(_Key.kind, "batman") + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): with warnings.catch_warnings(record=True) as warned: client.reserve_ids(complete_key, num_ids) - reserved_keys = ( - _Key(_Key.kind, id) - for id in range(complete_key.id, complete_key.id + num_ids) + _assert_reserve_ids_warning(warned) + + +def test_client_reserve_ids_w_completed_key(): + import warnings + + num_ids = 2 + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + complete_key = _Key() + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + assert not complete_key.is_partial + + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) + + reserved_keys = ( + _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys} + ) + _assert_reserve_ids_warning(warned) + + +def test_client_reserve_ids_w_completed_key_w_retry_w_timeout(): + import warnings + + num_ids = 2 + retry = mock.Mock() + timeout = 100000 + + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + complete_key = _Key() + assert not complete_key.is_partial + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) + + reserved_keys = ( + _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys}, + retry=retry, + timeout=timeout, + ) + _assert_reserve_ids_warning(warned) + + +def test_client_reserve_ids_w_completed_key_w_ancestor(): + import warnings + + num_ids = 2 + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) + reserve_ids = mock.Mock() + ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + assert not complete_key.is_partial + + with warnings.catch_warnings(record=True) as warned: + client.reserve_ids(complete_key, num_ids) + + reserved_keys = ( + _Key("PARENT", "SINGLETON", _Key.kind, id) + for id in 
range(complete_key.id, complete_key.id + num_ids) + ) + expected_keys = [key.to_protobuf() for key in reserved_keys] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys} + ) + + _assert_reserve_ids_warning(warned) + + +def test_client_key_w_project(): + KIND = "KIND" + ID = 1234 + + creds = _make_credentials() + client = _make_client(credentials=creds) + + with pytest.raises(TypeError): + client.key(KIND, ID, project=PROJECT) + + +def test_client_key_wo_project(): + kind = "KIND" + id_ = 1234 + + creds = _make_credentials() + client = _make_client(credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) + with patch as mock_klass: + key = client.key(kind, id_) + assert key is mock_klass.return_value + mock_klass.assert_called_once_with(kind, id_, project=PROJECT, namespace=None) + + +def test_client_key_w_namespace(): + kind = "KIND" + id_ = 1234 + namespace = object() + + creds = _make_credentials() + client = _make_client(namespace=namespace, credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) + with patch as mock_klass: + key = client.key(kind, id_) + assert key is mock_klass.return_value + mock_klass.assert_called_once_with( + kind, id_, project=PROJECT, namespace=namespace ) - expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys} + + +def test_client_key_w_namespace_collision(): + kind = "KIND" + id_ = 1234 + namespace1 = object() + namespace2 = object() + + creds = _make_credentials() + client = _make_client(namespace=namespace1, credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) + with patch as mock_klass: + key = client.key(kind, id_, namespace=namespace2) + assert key is mock_klass.return_value + mock_klass.assert_called_once_with( + kind, id_, project=PROJECT, namespace=namespace2 ) - self.assertEqual(len(warned), 1) - self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) - def test_reserve_ids_w_completed_key_w_retry_w_timeout(self): - import warnings +def test_client_entity_w_defaults(): + creds = _make_credentials() + client = _make_client(credentials=creds) - num_ids = 2 - retry = mock.Mock() - timeout = 100000 + patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) + with patch as mock_klass: + entity = client.entity() + assert entity is mock_klass.return_value + mock_klass.assert_called_once_with(key=None, exclude_from_indexes=()) - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key() - self.assertTrue(not complete_key.is_partial) - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - with warnings.catch_warnings(record=True) as warned: - client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) +def test_client_entity_w_explicit(): + key = mock.Mock(spec=[]) + exclude_from_indexes = ["foo", "bar"] + creds = _make_credentials() + client = _make_client(credentials=creds) - reserved_keys = ( - _Key(_Key.kind, id) - for id in range(complete_key.id, complete_key.id + num_ids) - ) - expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys}, - retry=retry, - timeout=timeout, + patch = 
mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) + with patch as mock_klass: + entity = client.entity(key, exclude_from_indexes) + assert entity is mock_klass.return_value + mock_klass.assert_called_once_with( + key=key, exclude_from_indexes=exclude_from_indexes ) - self.assertEqual(len(warned), 1) - self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) - def test_reserve_ids_w_completed_key_w_ancestor(self): - import warnings +def test_client_batch(): + creds = _make_credentials() + client = _make_client(credentials=creds) - num_ids = 2 - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - self.assertTrue(not complete_key.is_partial) + patch = mock.patch("google.cloud.datastore.client.Batch", spec=["__call__"]) + with patch as mock_klass: + batch = client.batch() + assert batch is mock_klass.return_value + mock_klass.assert_called_once_with(client) - with warnings.catch_warnings(record=True) as warned: - client.reserve_ids(complete_key, num_ids) - reserved_keys = ( - _Key("PARENT", "SINGLETON", _Key.kind, id) - for id in range(complete_key.id, complete_key.id + num_ids) +def test_client_transaction_w_defaults(): + creds = _make_credentials() + client = _make_client(credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Transaction", spec=["__call__"]) + with patch as mock_klass: + xact = client.transaction() + assert xact is mock_klass.return_value + mock_klass.assert_called_once_with(client) + + +def test_client_transaction_w_read_only(): + from google.cloud.datastore_v1.types import TransactionOptions + + creds = _make_credentials() + client = _make_client(credentials=creds) + xact = client.transaction(read_only=True) + options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) + assert xact._options == options + assert not xact._options._pb.HasField("read_write") + assert xact._options._pb.HasField("read_only") + assert xact._options._pb.read_only == TransactionOptions.ReadOnly()._pb + + +def test_client_query_w_other_client(): + KIND = "KIND" + + creds = _make_credentials() + client = _make_client(credentials=creds) + other = _make_client(credentials=_make_credentials()) + + with pytest.raises(TypeError): + client.query(kind=KIND, client=other) + + +def test_client_query_w_project(): + KIND = "KIND" + + creds = _make_credentials() + client = _make_client(credentials=creds) + + with pytest.raises(TypeError): + client.query(kind=KIND, project=PROJECT) + + +def test_client_query_w_defaults(): + creds = _make_credentials() + client = _make_client(credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) + with patch as mock_klass: + query = client.query() + assert query is mock_klass.return_value + mock_klass.assert_called_once_with(client, project=PROJECT, namespace=None) + + +def test_client_query_w_explicit(): + kind = "KIND" + namespace = "NAMESPACE" + ancestor = object() + filters = [("PROPERTY", "==", "VALUE")] + projection = ["__key__"] + order = ["PROPERTY"] + distinct_on = ["DISTINCT_ON"] + + creds = _make_credentials() + client = _make_client(credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) + with patch as mock_klass: + query = client.query( + kind=kind, + namespace=namespace, + 
ancestor=ancestor, + filters=filters, + projection=projection, + order=order, + distinct_on=distinct_on, ) - expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys} + assert query is mock_klass.return_value + mock_klass.assert_called_once_with( + client, + project=PROJECT, + kind=kind, + namespace=namespace, + ancestor=ancestor, + filters=filters, + projection=projection, + order=order, + distinct_on=distinct_on, ) - self.assertEqual(len(warned), 1) - self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) - - def test_reserve_ids_w_partial_key(self): - import warnings - - num_ids = 2 - incomplete_key = _Key(_Key.kind, None) - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - with warnings.catch_warnings(record=True) as warned: - client.reserve_ids(incomplete_key, num_ids) - - self.assertEqual(len(warned), 1) - self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) - - def test_reserve_ids_w_wrong_num_ids(self): - import warnings - - num_ids = "2" - complete_key = _Key() - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - with warnings.catch_warnings(record=True) as warned: - client.reserve_ids(complete_key, num_ids) - - self.assertEqual(len(warned), 1) - self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) - - def test_reserve_ids_w_non_numeric_key_name(self): - import warnings - - num_ids = 2 - complete_key = _Key(_Key.kind, "batman") - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - with warnings.catch_warnings(record=True) as warned: - client.reserve_ids(complete_key, num_ids) - - self.assertEqual(len(warned), 1) - self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message)) - - def test_reserve_ids_multi(self): - creds = _make_credentials() - client = self._make_one(credentials=creds, _use_grpc=False) - key1 = _Key(_Key.kind, "one") - key2 = _Key(_Key.kind, "two") - reserve_ids = mock.Mock() - ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) - client._datastore_api_internal = ds_api - - client.reserve_ids_multi([key1, key2]) - - expected_keys = [key1.to_protobuf(), key2.to_protobuf()] - reserve_ids.assert_called_once_with( - request={"project_id": self.PROJECT, "keys": expected_keys} - ) - def test_reserve_ids_multi_w_partial_key(self): - incomplete_key = _Key(_Key.kind, None) - creds = _make_credentials() - client = self._make_one(credentials=creds) - with self.assertRaises(ValueError): - client.reserve_ids_multi([incomplete_key]) - - def test_key_w_project(self): - KIND = "KIND" - ID = 1234 - - creds = _make_credentials() - client = self._make_one(credentials=creds) - - self.assertRaises(TypeError, client.key, KIND, ID, project=self.PROJECT) - - def test_key_wo_project(self): - kind = "KIND" - id_ = 1234 - - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) - with patch as mock_klass: - key = client.key(kind, id_) - self.assertIs(key, mock_klass.return_value) - mock_klass.assert_called_once_with( - kind, id_, project=self.PROJECT, namespace=None - ) - - def test_key_w_namespace(self): - kind = "KIND" - id_ = 1234 - namespace = object() - - creds = _make_credentials() - client = 
self._make_one(namespace=namespace, credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) - with patch as mock_klass: - key = client.key(kind, id_) - self.assertIs(key, mock_klass.return_value) - mock_klass.assert_called_once_with( - kind, id_, project=self.PROJECT, namespace=namespace - ) - - def test_key_w_namespace_collision(self): - kind = "KIND" - id_ = 1234 - namespace1 = object() - namespace2 = object() - - creds = _make_credentials() - client = self._make_one(namespace=namespace1, credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) - with patch as mock_klass: - key = client.key(kind, id_, namespace=namespace2) - self.assertIs(key, mock_klass.return_value) - mock_klass.assert_called_once_with( - kind, id_, project=self.PROJECT, namespace=namespace2 - ) - - def test_entity_w_defaults(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) - with patch as mock_klass: - entity = client.entity() - self.assertIs(entity, mock_klass.return_value) - mock_klass.assert_called_once_with(key=None, exclude_from_indexes=()) - - def test_entity_w_explicit(self): - key = mock.Mock(spec=[]) - exclude_from_indexes = ["foo", "bar"] - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) - with patch as mock_klass: - entity = client.entity(key, exclude_from_indexes) - self.assertIs(entity, mock_klass.return_value) - mock_klass.assert_called_once_with( - key=key, exclude_from_indexes=exclude_from_indexes - ) - - def test_batch(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Batch", spec=["__call__"]) - with patch as mock_klass: - batch = client.batch() - self.assertIs(batch, mock_klass.return_value) - mock_klass.assert_called_once_with(client) - - def test_transaction_defaults(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch( - "google.cloud.datastore.client.Transaction", spec=["__call__"] +def test_client_query_w_namespace(): + kind = "KIND" + namespace = object() + + creds = _make_credentials() + client = _make_client(namespace=namespace, credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) + with patch as mock_klass: + query = client.query(kind=kind) + assert query is mock_klass.return_value + mock_klass.assert_called_once_with( + client, project=PROJECT, namespace=namespace, kind=kind ) - with patch as mock_klass: - xact = client.transaction() - self.assertIs(xact, mock_klass.return_value) - mock_klass.assert_called_once_with(client) - - def test_read_only_transaction_defaults(self): - from google.cloud.datastore_v1.types import TransactionOptions - - creds = _make_credentials() - client = self._make_one(credentials=creds) - xact = client.transaction(read_only=True) - self.assertEqual( - xact._options, TransactionOptions(read_only=TransactionOptions.ReadOnly()) + + +def test_client_query_w_namespace_collision(): + kind = "KIND" + namespace1 = object() + namespace2 = object() + + creds = _make_credentials() + client = _make_client(namespace=namespace1, credentials=creds) + + patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) + with patch as mock_klass: + query = client.query(kind=kind, 
namespace=namespace2) + assert query is mock_klass.return_value + mock_klass.assert_called_once_with( + client, project=PROJECT, namespace=namespace2, kind=kind ) - self.assertFalse(xact._options._pb.HasField("read_write")) - self.assertTrue(xact._options._pb.HasField("read_only")) - self.assertEqual(xact._options._pb.read_only, TransactionOptions.ReadOnly()._pb) - - def test_query_w_client(self): - KIND = "KIND" - - creds = _make_credentials() - client = self._make_one(credentials=creds) - other = self._make_one(credentials=_make_credentials()) - - self.assertRaises(TypeError, client.query, kind=KIND, client=other) - - def test_query_w_project(self): - KIND = "KIND" - - creds = _make_credentials() - client = self._make_one(credentials=creds) - - self.assertRaises(TypeError, client.query, kind=KIND, project=self.PROJECT) - - def test_query_w_defaults(self): - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) - with patch as mock_klass: - query = client.query() - self.assertIs(query, mock_klass.return_value) - mock_klass.assert_called_once_with( - client, project=self.PROJECT, namespace=None - ) - - def test_query_explicit(self): - kind = "KIND" - namespace = "NAMESPACE" - ancestor = object() - filters = [("PROPERTY", "==", "VALUE")] - projection = ["__key__"] - order = ["PROPERTY"] - distinct_on = ["DISTINCT_ON"] - - creds = _make_credentials() - client = self._make_one(credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) - with patch as mock_klass: - query = client.query( - kind=kind, - namespace=namespace, - ancestor=ancestor, - filters=filters, - projection=projection, - order=order, - distinct_on=distinct_on, - ) - self.assertIs(query, mock_klass.return_value) - mock_klass.assert_called_once_with( - client, - project=self.PROJECT, - kind=kind, - namespace=namespace, - ancestor=ancestor, - filters=filters, - projection=projection, - order=order, - distinct_on=distinct_on, - ) - - def test_query_w_namespace(self): - kind = "KIND" - namespace = object() - - creds = _make_credentials() - client = self._make_one(namespace=namespace, credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) - with patch as mock_klass: - query = client.query(kind=kind) - self.assertIs(query, mock_klass.return_value) - mock_klass.assert_called_once_with( - client, project=self.PROJECT, namespace=namespace, kind=kind - ) - - def test_query_w_namespace_collision(self): - kind = "KIND" - namespace1 = object() - namespace2 = object() - - creds = _make_credentials() - client = self._make_one(namespace=namespace1, credentials=creds) - - patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) - with patch as mock_klass: - query = client.query(kind=kind, namespace=namespace2) - self.assertIs(query, mock_klass.return_value) - mock_klass.assert_called_once_with( - client, project=self.PROJECT, namespace=namespace2, kind=kind - ) + + +def test_client_reserve_ids_multi_w_partial_key(): + incomplete_key = _Key(_Key.kind, None) + creds = _make_credentials() + client = _make_client(credentials=creds) + with pytest.raises(ValueError): + client.reserve_ids_multi([incomplete_key]) + + +def test_client_reserve_ids_multi(): + creds = _make_credentials() + client = _make_client(credentials=creds, _use_grpc=False) + key1 = _Key(_Key.kind, "one") + key2 = _Key(_Key.kind, "two") + reserve_ids = mock.Mock() + ds_api = 
mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) + client._datastore_api_internal = ds_api + + client.reserve_ids_multi([key1, key2]) + + expected_keys = [key1.to_protobuf(), key2.to_protobuf()] + reserve_ids.assert_called_once_with( + request={"project_id": PROJECT, "keys": expected_keys} + ) class _NoCommitBatch(object): @@ -1535,7 +1529,7 @@ class _Key(object): kind = "KIND" id = 1234 name = None - _project = project = "PROJECT" + _project = project = PROJECT _namespace = None _key = "KEY" @@ -1603,18 +1597,13 @@ def __init__(self, id_): self.path = [_PathElementPB(id_)] -def _assert_num_mutations(test_case, mutation_pb_list, num_mutations): - test_case.assertEqual(len(mutation_pb_list), num_mutations) - - -def _mutated_pb(test_case, mutation_pb_list, mutation_type): - # Make sure there is only one mutation. - _assert_num_mutations(test_case, mutation_pb_list, 1) +def _mutated_pb(mutation_pb_list, mutation_type): + assert len(mutation_pb_list) == 1 # We grab the only mutation. mutated_pb = mutation_pb_list[0] # Then check if it is the correct type. - test_case.assertEqual(mutated_pb._pb.WhichOneof("operation"), mutation_type) + assert mutated_pb._pb.WhichOneof("operation") == mutation_type return getattr(mutated_pb, mutation_type) @@ -1657,3 +1646,25 @@ def _make_datastore_api(*keys, **kwargs): return mock.Mock( commit=commit_method, lookup=lookup_method, spec=["commit", "lookup"] ) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = project + path_element = entity_pb._pb.key.path.add() + path_element.kind = kind + path_element.id = integer_id + if name is not None and str_val is not None: + value_pb = _new_value_pb(entity_pb, name) + value_pb.string_value = str_val + + return entity_pb diff --git a/packages/google-cloud-datastore/tests/unit/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py index c65541a45854..faa862e47f35 100644 --- a/packages/google-cloud-datastore/tests/unit/test_entity.py +++ b/packages/google-cloud-datastore/tests/unit/test_entity.py @@ -12,214 +12,222 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest +import pytest _PROJECT = "PROJECT" _KIND = "KIND" _ID = 1234 -class TestEntity(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.datastore.entity import Entity - - return Entity - - def _make_one(self, key=None, exclude_from_indexes=()): - klass = self._get_target_class() - return klass(key=key, exclude_from_indexes=exclude_from_indexes) - - def test_ctor_defaults(self): - klass = self._get_target_class() - entity = klass() - self.assertIsNone(entity.key) - self.assertIsNone(entity.kind) - self.assertEqual(sorted(entity.exclude_from_indexes), []) - - def test_ctor_explicit(self): - _EXCLUDE_FROM_INDEXES = ["foo", "bar"] - key = _Key() - entity = self._make_one(key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES) - self.assertEqual( - sorted(entity.exclude_from_indexes), sorted(_EXCLUDE_FROM_INDEXES) - ) - - def test_ctor_bad_exclude_from_indexes(self): - BAD_EXCLUDE_FROM_INDEXES = object() - key = _Key() - self.assertRaises( - TypeError, - self._make_one, - key=key, - exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES, - ) - - def test___eq_____ne___w_non_entity(self): - from google.cloud.datastore.key import Key - - key = Key(_KIND, _ID, project=_PROJECT) - entity = self._make_one(key=key) - self.assertFalse(entity == object()) - self.assertTrue(entity != object()) - - def test___eq_____ne___w_different_keys(self): - from google.cloud.datastore.key import Key - - _ID1 = 1234 - _ID2 = 2345 - key1 = Key(_KIND, _ID1, project=_PROJECT) - entity1 = self._make_one(key=key1) - key2 = Key(_KIND, _ID2, project=_PROJECT) - entity2 = self._make_one(key=key2) - self.assertFalse(entity1 == entity2) - self.assertTrue(entity1 != entity2) - - def test___eq_____ne___w_same_keys(self): - from google.cloud.datastore.key import Key - - name = "foo" - value = 42 - meaning = 9 - - key1 = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._make_one(key=key1, exclude_from_indexes=(name,)) - entity1[name] = value - entity1._meanings[name] = (meaning, value) - - key2 = Key(_KIND, _ID, project=_PROJECT) - entity2 = self._make_one(key=key2, exclude_from_indexes=(name,)) - entity2[name] = value - entity2._meanings[name] = (meaning, value) - - self.assertTrue(entity1 == entity2) - self.assertFalse(entity1 != entity2) - - def test___eq_____ne___w_same_keys_different_props(self): - from google.cloud.datastore.key import Key - - key1 = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._make_one(key=key1) - entity1["foo"] = "Foo" - key2 = Key(_KIND, _ID, project=_PROJECT) - entity2 = self._make_one(key=key2) - entity1["bar"] = "Bar" - self.assertFalse(entity1 == entity2) - self.assertTrue(entity1 != entity2) - - def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): - from google.cloud.datastore.key import Key - - key1 = Key(_KIND, _ID, project=_PROJECT) - key2 = Key(_KIND, _ID, project=_PROJECT) - entity1 = self._make_one(key=key1) - entity1["some_key"] = key1 - entity2 = self._make_one(key=key1) - entity2["some_key"] = key2 - self.assertTrue(entity1 == entity2) - self.assertFalse(entity1 != entity2) - - def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): - from google.cloud.datastore.key import Key - - _ID1 = 1234 - _ID2 = 2345 - key1 = Key(_KIND, _ID1, project=_PROJECT) - key2 = Key(_KIND, _ID2, project=_PROJECT) - entity1 = self._make_one(key=key1) - entity1["some_key"] = key1 - entity2 = self._make_one(key=key1) - entity2["some_key"] = key2 - self.assertFalse(entity1 == entity2) - self.assertTrue(entity1 != entity2) - - def 
test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self):
- from google.cloud.datastore.key import Key
-
- key = Key(_KIND, _ID, project=_PROJECT)
- entity1 = self._make_one(key=key)
- sub1 = self._make_one()
- sub1.update({"foo": "Foo"})
- entity1["some_entity"] = sub1
- entity2 = self._make_one(key=key)
- sub2 = self._make_one()
- sub2.update({"foo": "Foo"})
- entity2["some_entity"] = sub2
- self.assertTrue(entity1 == entity2)
- self.assertFalse(entity1 != entity2)
-
- def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self):
- from google.cloud.datastore.key import Key
-
- key = Key(_KIND, _ID, project=_PROJECT)
- entity1 = self._make_one(key=key)
- sub1 = self._make_one()
- sub1.update({"foo": "Foo"})
- entity1["some_entity"] = sub1
- entity2 = self._make_one(key=key)
- sub2 = self._make_one()
- sub2.update({"foo": "Bar"})
- entity2["some_entity"] = sub2
- self.assertFalse(entity1 == entity2)
- self.assertTrue(entity1 != entity2)
-
- def test__eq__same_value_different_exclude(self):
- from google.cloud.datastore.key import Key
-
- name = "foo"
- value = 42
- key = Key(_KIND, _ID, project=_PROJECT)
-
- entity1 = self._make_one(key=key, exclude_from_indexes=(name,))
- entity1[name] = value
-
- entity2 = self._make_one(key=key, exclude_from_indexes=())
- entity2[name] = value
-
- self.assertFalse(entity1 == entity2)
-
- def test__eq__same_value_different_meanings(self):
- from google.cloud.datastore.key import Key
-
- name = "foo"
- value = 42
- meaning = 9
- key = Key(_KIND, _ID, project=_PROJECT)
-
- entity1 = self._make_one(key=key, exclude_from_indexes=(name,))
- entity1[name] = value
-
- entity2 = self._make_one(key=key, exclude_from_indexes=(name,))
- entity2[name] = value
- entity2._meanings[name] = (meaning, value)
-
- self.assertFalse(entity1 == entity2)
-
- def test_id(self):
- from google.cloud.datastore.key import Key
-
- key = Key(_KIND, _ID, project=_PROJECT)
- entity = self._make_one(key=key)
- self.assertEqual(entity.id, _ID)
-
- def test_id_none(self):
-
- entity = self._make_one(key=None)
- self.assertEqual(entity.id, None)
-
- def test___repr___no_key_empty(self):
- entity = self._make_one()
- self.assertEqual(repr(entity), "<Entity {}>")
-
- def test___repr___w_key_non_empty(self):
- key = _Key()
- flat_path = ("bar", 12, "baz", "himom")
- key._flat_path = flat_path
- entity = self._make_one(key=key)
- entity_vals = {"foo": "Foo"}
- entity.update(entity_vals)
- expected = "<Entity%s %s>" % (flat_path, entity_vals)
- self.assertEqual(repr(entity), expected)
+def _make_entity(key=None, exclude_from_indexes=()):
+ from google.cloud.datastore.entity import Entity
+
+ return Entity(key=key, exclude_from_indexes=exclude_from_indexes)
+
+
+def test_entity_ctor_defaults():
+ from google.cloud.datastore.entity import Entity
+
+ entity = Entity()
+ assert entity.key is None
+ assert entity.kind is None
+ assert sorted(entity.exclude_from_indexes) == []
+
+
+def test_entity_ctor_explicit():
+ _EXCLUDE_FROM_INDEXES = ["foo", "bar"]
+ key = _Key()
+ entity = _make_entity(key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES)
+ assert sorted(entity.exclude_from_indexes) == sorted(_EXCLUDE_FROM_INDEXES)
+
+
+def test_entity_ctor_bad_exclude_from_indexes():
+ BAD_EXCLUDE_FROM_INDEXES = object()
+ key = _Key()
+ with pytest.raises(TypeError):
+ _make_entity(key=key, exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES)
+
+
+def test_entity___eq_____ne___w_non_entity():
+ from google.cloud.datastore.key import Key
+
+ key = Key(_KIND, _ID, project=_PROJECT)
+ entity = _make_entity(key=key)
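+ # Assert "==" and "!=" separately so the test exercises both the
+ # __eq__ and __ne__ paths on Entity.
+ 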
assert not entity == object() + assert entity != object() + + +def test_entity___eq_____ne___w_different_keys(): + from google.cloud.datastore.key import Key + + _ID1 = 1234 + _ID2 = 2345 + key1 = Key(_KIND, _ID1, project=_PROJECT) + entity1 = _make_entity(key=key1) + key2 = Key(_KIND, _ID2, project=_PROJECT) + entity2 = _make_entity(key=key2) + assert not entity1 == entity2 + assert entity1 != entity2 + + +def test_entity___eq_____ne___w_same_keys(): + from google.cloud.datastore.key import Key + + name = "foo" + value = 42 + meaning = 9 + + key1 = Key(_KIND, _ID, project=_PROJECT) + entity1 = _make_entity(key=key1, exclude_from_indexes=(name,)) + entity1[name] = value + entity1._meanings[name] = (meaning, value) + + key2 = Key(_KIND, _ID, project=_PROJECT) + entity2 = _make_entity(key=key2, exclude_from_indexes=(name,)) + entity2[name] = value + entity2._meanings[name] = (meaning, value) + + assert entity1 == entity2 + assert not entity1 != entity2 + + +def test_entity___eq_____ne___w_same_keys_different_props(): + from google.cloud.datastore.key import Key + + key1 = Key(_KIND, _ID, project=_PROJECT) + entity1 = _make_entity(key=key1) + entity1["foo"] = "Foo" + key2 = Key(_KIND, _ID, project=_PROJECT) + entity2 = _make_entity(key=key2) + entity1["bar"] = "Bar" + assert not entity1 == entity2 + assert entity1 != entity2 + + +def test_entity___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(): + from google.cloud.datastore.key import Key + + key1 = Key(_KIND, _ID, project=_PROJECT) + key2 = Key(_KIND, _ID, project=_PROJECT) + entity1 = _make_entity(key=key1) + entity1["some_key"] = key1 + entity2 = _make_entity(key=key1) + entity2["some_key"] = key2 + assert entity1 == entity2 + assert not entity1 != entity2 + + +def test_entity___eq_____ne___w_same_keys_props_w_diff_keys_as_value(): + from google.cloud.datastore.key import Key + + _ID1 = 1234 + _ID2 = 2345 + key1 = Key(_KIND, _ID1, project=_PROJECT) + key2 = Key(_KIND, _ID2, project=_PROJECT) + entity1 = _make_entity(key=key1) + entity1["some_key"] = key1 + entity2 = _make_entity(key=key1) + entity2["some_key"] = key2 + assert not entity1 == entity2 + assert entity1 != entity2 + + +def test_entity___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(): + from google.cloud.datastore.key import Key + + key = Key(_KIND, _ID, project=_PROJECT) + entity1 = _make_entity(key=key) + sub1 = _make_entity() + sub1.update({"foo": "Foo"}) + entity1["some_entity"] = sub1 + entity2 = _make_entity(key=key) + sub2 = _make_entity() + sub2.update({"foo": "Foo"}) + entity2["some_entity"] = sub2 + assert entity1 == entity2 + assert not entity1 != entity2 + + +def test_entity___eq_____ne___w_same_keys_props_w_diff_entities_as_value(): + from google.cloud.datastore.key import Key + + key = Key(_KIND, _ID, project=_PROJECT) + entity1 = _make_entity(key=key) + sub1 = _make_entity() + sub1.update({"foo": "Foo"}) + entity1["some_entity"] = sub1 + entity2 = _make_entity(key=key) + sub2 = _make_entity() + sub2.update({"foo": "Bar"}) + entity2["some_entity"] = sub2 + assert not entity1 == entity2 + assert entity1 != entity2 + + +def test__eq__same_value_different_exclude(): + from google.cloud.datastore.key import Key + + name = "foo" + value = 42 + key = Key(_KIND, _ID, project=_PROJECT) + + entity1 = _make_entity(key=key, exclude_from_indexes=(name,)) + entity1[name] = value + + entity2 = _make_entity(key=key, exclude_from_indexes=()) + entity2[name] = value + + assert not entity1 == entity2 + assert entity1 != entity2 + + +def 
test_entity___eq__same_value_different_meanings():
+ from google.cloud.datastore.key import Key
+
+ name = "foo"
+ value = 42
+ meaning = 9
+ key = Key(_KIND, _ID, project=_PROJECT)
+
+ entity1 = _make_entity(key=key, exclude_from_indexes=(name,))
+ entity1[name] = value
+
+ entity2 = _make_entity(key=key, exclude_from_indexes=(name,))
+ entity2[name] = value
+ entity2._meanings[name] = (meaning, value)
+
+ assert not entity1 == entity2
+ assert entity1 != entity2
+
+
+def test_id():
+ from google.cloud.datastore.key import Key
+
+ key = Key(_KIND, _ID, project=_PROJECT)
+ entity = _make_entity(key=key)
+ assert entity.id == _ID
+
+
+def test_id_none():
+
+ entity = _make_entity(key=None)
+ assert entity.id is None
+
+
+def test___repr___no_key_empty():
+ entity = _make_entity()
+ assert repr(entity) == "<Entity {}>"
+
+
+def test___repr___w_key_non_empty():
+ key = _Key()
+ flat_path = ("bar", 12, "baz", "himom")
+ key._flat_path = flat_path
+ entity = _make_entity(key=key)
+ entity_vals = {"foo": "Foo"}
+ entity.update(entity_vals)
+ expected = "<Entity%s %s>" % (flat_path, entity_vals)
+ assert repr(entity) == expected
class _Key(object):
diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py
index c37499caa029..4c1861a2537f 100644
--- a/packages/google-cloud-datastore/tests/unit/test_helpers.py
+++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py
@@ -12,1010 +12,1123 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import unittest
+import pytest
-class Test__new_value_pb(unittest.TestCase):
- def _call_fut(self, entity_pb, name):
- from google.cloud.datastore.helpers import _new_value_pb
+def test__new_value_pb():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _new_value_pb
- return _new_value_pb(entity_pb, name)
+ entity_pb = entity_pb2.Entity()
+ name = "foo"
+ result = _new_value_pb(entity_pb, name)
- def test_it(self):
- from google.cloud.datastore_v1.types import entity as entity_pb2
+ assert isinstance(result, type(entity_pb2.Value()._pb))
+ assert len(entity_pb._pb.properties) == 1
+ assert entity_pb._pb.properties[name] == result
- entity_pb = entity_pb2.Entity()
- name = "foo"
- result = self._call_fut(entity_pb, name)
- self.assertIsInstance(result, type(entity_pb2.Value()._pb))
- self.assertEqual(len(entity_pb._pb.properties), 1)
- self.assertEqual(entity_pb._pb.properties[name], result)
+def test_entity_from_protobuf_w_defaults():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _new_value_pb
+ from google.cloud.datastore.helpers import entity_from_protobuf
+ _PROJECT = "PROJECT"
+ _KIND = "KIND"
+ _ID = 1234
+ entity_pb = entity_pb2.Entity()
+ entity_pb.key.partition_id.project_id = _PROJECT
+ entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
-class Test_entity_from_protobuf(unittest.TestCase):
- def _call_fut(self, val):
- from google.cloud.datastore.helpers import entity_from_protobuf
+ value_pb = _new_value_pb(entity_pb, "foo")
+ value_pb.string_value = "Foo"
- return entity_from_protobuf(val)
+ unindexed_val_pb = _new_value_pb(entity_pb, "bar")
+ unindexed_val_pb.integer_value = 10
+ unindexed_val_pb.exclude_from_indexes = True
- def test_it(self):
- from google.cloud.datastore_v1.types import entity as entity_pb2
- from google.cloud.datastore.helpers import _new_value_pb
+ array_val_pb1 = _new_value_pb(entity_pb, "baz")
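+ # For an array property, exclude_from_indexes is recorded on each
+ # element value rather than on the enclosing array value; the
+ # elements added below leave "baz" unindexed while "qux" stays
+ # indexed.
+ 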
array_pb1 = array_val_pb1.array_value.values - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - entity_pb = entity_pb2.Entity() - entity_pb.key.partition_id.project_id = _PROJECT - entity_pb._pb.key.path.add(kind=_KIND, id=_ID) + unindexed_array_val_pb = array_pb1.add() + unindexed_array_val_pb.integer_value = 11 + unindexed_array_val_pb.exclude_from_indexes = True - value_pb = _new_value_pb(entity_pb, "foo") - value_pb.string_value = "Foo" + array_val_pb2 = _new_value_pb(entity_pb, "qux") + array_pb2 = array_val_pb2.array_value.values - unindexed_val_pb = _new_value_pb(entity_pb, "bar") - unindexed_val_pb.integer_value = 10 - unindexed_val_pb.exclude_from_indexes = True + indexed_array_val_pb = array_pb2.add() + indexed_array_val_pb.integer_value = 12 - array_val_pb1 = _new_value_pb(entity_pb, "baz") - array_pb1 = array_val_pb1.array_value.values + entity = entity_from_protobuf(entity_pb._pb) + assert entity.kind == _KIND + assert entity.exclude_from_indexes == frozenset(["bar", "baz"]) + entity_props = dict(entity) + assert entity_props == {"foo": "Foo", "bar": 10, "baz": [11], "qux": [12]} - unindexed_array_val_pb = array_pb1.add() - unindexed_array_val_pb.integer_value = 11 - unindexed_array_val_pb.exclude_from_indexes = True + # Also check the key. + key = entity.key + assert key.project == _PROJECT + assert key.namespace is None + assert key.kind == _KIND + assert key.id == _ID - array_val_pb2 = _new_value_pb(entity_pb, "qux") - array_pb2 = array_val_pb2.array_value.values - indexed_array_val_pb = array_pb2.add() - indexed_array_val_pb.integer_value = 12 +def test_entity_from_protobuf_w_mismatched_value_indexed(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + from google.cloud.datastore.helpers import entity_from_protobuf - entity = self._call_fut(entity_pb._pb) - self.assertEqual(entity.kind, _KIND) - self.assertEqual(entity.exclude_from_indexes, frozenset(["bar", "baz"])) - entity_props = dict(entity) - self.assertEqual( - entity_props, {"foo": "Foo", "bar": 10, "baz": [11], "qux": [12]} - ) + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID = 1234 + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb._pb.key.path.add(kind=_KIND, id=_ID) - # Also check the key. 
- key = entity.key - self.assertEqual(key.project, _PROJECT) - self.assertIsNone(key.namespace) - self.assertEqual(key.kind, _KIND) - self.assertEqual(key.id, _ID) + array_val_pb = _new_value_pb(entity_pb, "baz") + array_pb = array_val_pb.array_value.values - def test_mismatched_value_indexed(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import _new_value_pb + unindexed_value_pb1 = array_pb.add() + unindexed_value_pb1.integer_value = 10 + unindexed_value_pb1.exclude_from_indexes = True - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - entity_pb = entity_pb2.Entity() - entity_pb.key.partition_id.project_id = _PROJECT - entity_pb._pb.key.path.add(kind=_KIND, id=_ID) + unindexed_value_pb2 = array_pb.add() + unindexed_value_pb2.integer_value = 11 - array_val_pb = _new_value_pb(entity_pb, "baz") - array_pb = array_val_pb.array_value.values + with pytest.raises(ValueError): + entity_from_protobuf(entity_pb._pb) - unindexed_value_pb1 = array_pb.add() - unindexed_value_pb1.integer_value = 10 - unindexed_value_pb1.exclude_from_indexes = True - unindexed_value_pb2 = array_pb.add() - unindexed_value_pb2.integer_value = 11 +def test_entity_from_protobuf_w_entity_no_key(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import entity_from_protobuf - with self.assertRaises(ValueError): - self._call_fut(entity_pb._pb) + entity_pb = entity_pb2.Entity() + entity = entity_from_protobuf(entity_pb._pb) - def test_entity_no_key(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 + assert entity.key is None + assert dict(entity) == {} - entity_pb = entity_pb2.Entity() - entity = self._call_fut(entity_pb._pb) - self.assertIsNone(entity.key) - self.assertEqual(dict(entity), {}) +def test_entity_from_protobuf_w_pb2_entity_no_key(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import entity_from_protobuf - def test_pb2_entity_no_key(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 + entity_pb = entity_pb2.Entity() + entity = entity_from_protobuf(entity_pb) - entity_pb = entity_pb2.Entity() - entity = self._call_fut(entity_pb) - - self.assertIsNone(entity.key) - self.assertEqual(dict(entity), {}) - - def test_entity_with_meaning(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import _new_value_pb - - entity_pb = entity_pb2.Entity() - name = "hello" - value_pb = _new_value_pb(entity_pb, name) - value_pb.meaning = meaning = 9 - value_pb.string_value = val = u"something" - - entity = self._call_fut(entity_pb) - self.assertIsNone(entity.key) - self.assertEqual(dict(entity), {name: val}) - self.assertEqual(entity._meanings, {name: (meaning, val)}) - - def test_nested_entity_no_key(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import _new_value_pb - - PROJECT = "FOO" - KIND = "KIND" - INSIDE_NAME = "IFOO" - OUTSIDE_NAME = "OBAR" - INSIDE_VALUE = 1337 - - entity_inside = entity_pb2.Entity() - inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME) - inside_val_pb.integer_value = INSIDE_VALUE - - entity_pb = entity_pb2.Entity() - entity_pb.key.partition_id.project_id = PROJECT - element = entity_pb._pb.key.path.add() - element.kind = KIND - - outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME) - outside_val_pb.entity_value.CopyFrom(entity_inside._pb) - - 
entity = self._call_fut(entity_pb._pb) - self.assertEqual(entity.key.project, PROJECT) - self.assertEqual(entity.key.flat_path, (KIND,)) - self.assertEqual(len(entity), 1) - - inside_entity = entity[OUTSIDE_NAME] - self.assertIsNone(inside_entity.key) - self.assertEqual(len(inside_entity), 1) - self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE) - - def test_index_mismatch_ignores_empty_list(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - - array_val_pb = entity_pb2.Value(array_value=entity_pb2.ArrayValue(values=[])) - - entity_pb = entity_pb2.Entity(properties={"baz": array_val_pb}) - entity_pb.key.partition_id.project_id = _PROJECT - entity_pb.key._pb.path.add(kind=_KIND, id=_ID) - - entity = self._call_fut(entity_pb._pb) - entity_dict = dict(entity) - self.assertEqual(entity_dict["baz"], []) - - -class Test_entity_to_protobuf(unittest.TestCase): - def _call_fut(self, entity): - from google.cloud.datastore.helpers import entity_to_protobuf - - return entity_to_protobuf(entity) - - def _compare_entity_proto(self, entity_pb1, entity_pb2): - self.assertEqual(entity_pb1.key, entity_pb2.key) - value_list1 = sorted(entity_pb1.properties.items()) - value_list2 = sorted(entity_pb2.properties.items()) - self.assertEqual(len(value_list1), len(value_list2)) - for pair1, pair2 in zip(value_list1, value_list2): - name1, val1 = pair1 - name2, val2 = pair2 - self.assertEqual(name1, name2) - if val1._pb.HasField("entity_value"): # Message field (Entity) - self.assertEqual(val1.meaning, val2.meaning) - self._compare_entity_proto(val1.entity_value, val2.entity_value) - else: - self.assertEqual(val1, val2) - - def test_empty(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - - entity = Entity() - entity_pb = self._call_fut(entity) - self._compare_entity_proto(entity_pb, entity_pb2.Entity()) - - def test_key_only(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.key import Key - - kind, name = "PATH", "NAME" - project = "PROJECT" - key = Key(kind, name, project=project) - entity = Entity(key=key) - entity_pb = self._call_fut(entity) - - expected_pb = entity_pb2.Entity() - expected_pb.key.partition_id.project_id = project - path_elt = expected_pb._pb.key.path.add() - path_elt.kind = kind - path_elt.name = name - - self._compare_entity_proto(entity_pb, expected_pb) - - def test_simple_fields(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.helpers import _new_value_pb - - entity = Entity() - name1 = "foo" - entity[name1] = value1 = 42 - name2 = "bar" - entity[name2] = value2 = u"some-string" - entity_pb = self._call_fut(entity) - - expected_pb = entity_pb2.Entity() - val_pb1 = _new_value_pb(expected_pb, name1) - val_pb1.integer_value = value1 - val_pb2 = _new_value_pb(expected_pb, name2) - val_pb2.string_value = value2 - - self._compare_entity_proto(entity_pb, expected_pb) - - def test_with_empty_list(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - - entity = Entity() - entity["foo"] = [] - entity_pb = self._call_fut(entity) - - expected_pb = entity_pb2.Entity() - prop = expected_pb._pb.properties.get_or_create("foo") - 
prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[])._pb) - - self._compare_entity_proto(entity_pb, expected_pb) - - def test_inverts_to_protobuf(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import _new_value_pb - from google.cloud.datastore.helpers import entity_from_protobuf - - original_pb = entity_pb2.Entity() - # Add a key. - original_pb.key.partition_id.project_id = project = "PROJECT" - elem1 = original_pb._pb.key.path.add() - elem1.kind = "Family" - elem1.id = 1234 - elem2 = original_pb._pb.key.path.add() - elem2.kind = "King" - elem2.name = "Spades" - - # Add an integer property. - val_pb1 = _new_value_pb(original_pb, "foo") - val_pb1.integer_value = 1337 - val_pb1.exclude_from_indexes = True - # Add a string property. - val_pb2 = _new_value_pb(original_pb, "bar") - val_pb2.string_value = u"hello" - - # Add a nested (entity) property. - val_pb3 = _new_value_pb(original_pb, "entity-baz") - sub_pb = entity_pb2.Entity() - sub_val_pb1 = _new_value_pb(sub_pb, "x") - sub_val_pb1.double_value = 3.14 - sub_val_pb2 = _new_value_pb(sub_pb, "y") - sub_val_pb2.double_value = 2.718281828 - val_pb3.meaning = 9 - val_pb3.entity_value.CopyFrom(sub_pb._pb) - - # Add a list property. - val_pb4 = _new_value_pb(original_pb, "list-quux") - array_val1 = val_pb4.array_value.values.add() - array_val1.exclude_from_indexes = False - array_val1.meaning = meaning = 22 - array_val1.blob_value = b"\xe2\x98\x83" - array_val2 = val_pb4.array_value.values.add() - array_val2.exclude_from_indexes = False - array_val2.meaning = meaning - array_val2.blob_value = b"\xe2\x98\x85" - - # Convert to the user-space Entity. - entity = entity_from_protobuf(original_pb) - # Convert the user-space Entity back to a protobuf. - new_pb = self._call_fut(entity) - - # NOTE: entity_to_protobuf() strips the project so we "cheat". - new_pb.key.partition_id.project_id = project - self._compare_entity_proto(original_pb, new_pb) - - def test_meaning_with_change(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.helpers import _new_value_pb - - entity = Entity() - name = "foo" - entity[name] = value = 42 - entity._meanings[name] = (9, 1337) - entity_pb = self._call_fut(entity) - - expected_pb = entity_pb2.Entity() - value_pb = _new_value_pb(expected_pb, name) - value_pb.integer_value = value - # NOTE: No meaning is used since the value differs from the - # value stored. - self._compare_entity_proto(entity_pb, expected_pb) - - def test_variable_meanings(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.helpers import _new_value_pb - - entity = Entity() - name = "quux" - entity[name] = values = [1, 20, 300] - meaning = 9 - entity._meanings[name] = ([None, meaning, None], values) - entity_pb = self._call_fut(entity) - - # Construct the expected protobuf. - expected_pb = entity_pb2.Entity() - value_pb = _new_value_pb(expected_pb, name) - value0 = value_pb.array_value.values.add() - value0.integer_value = values[0] - # The only array entry with a meaning is the middle one. 
- value1 = value_pb.array_value.values.add() - value1.integer_value = values[1] - value1.meaning = meaning - value2 = value_pb.array_value.values.add() - value2.integer_value = values[2] - - self._compare_entity_proto(entity_pb, expected_pb) - - def test_dict_to_entity(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - - entity = Entity() - entity["a"] = {"b": u"c"} - entity_pb = self._call_fut(entity) - - expected_pb = entity_pb2.Entity( - properties={ - "a": entity_pb2.Value( - entity_value=entity_pb2.Entity( - properties={"b": entity_pb2.Value(string_value="c")} - ) + assert entity.key is None + assert dict(entity) == {} + + +def test_entity_from_protobuf_w_entity_with_meaning(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + from google.cloud.datastore.helpers import entity_from_protobuf + + entity_pb = entity_pb2.Entity() + name = "hello" + value_pb = _new_value_pb(entity_pb, name) + value_pb.meaning = meaning = 9 + value_pb.string_value = val = u"something" + + entity = entity_from_protobuf(entity_pb) + assert entity.key is None + assert dict(entity) == {name: val} + assert entity._meanings == {name: (meaning, val)} + + +def test_entity_from_protobuf_w_nested_entity_no_key(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _new_value_pb + from google.cloud.datastore.helpers import entity_from_protobuf + + PROJECT = "FOO" + KIND = "KIND" + INSIDE_NAME = "IFOO" + OUTSIDE_NAME = "OBAR" + INSIDE_VALUE = 1337 + + entity_inside = entity_pb2.Entity() + inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME) + inside_val_pb.integer_value = INSIDE_VALUE + + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = PROJECT + element = entity_pb._pb.key.path.add() + element.kind = KIND + + outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME) + outside_val_pb.entity_value.CopyFrom(entity_inside._pb) + + entity = entity_from_protobuf(entity_pb._pb) + assert entity.key.project == PROJECT + assert entity.key.flat_path == (KIND,) + assert len(entity) == 1 + + inside_entity = entity[OUTSIDE_NAME] + assert inside_entity.key is None + assert len(inside_entity) == 1 + assert inside_entity[INSIDE_NAME] == INSIDE_VALUE + + +def test_entity_from_protobuf_w_index_mismatch_w_empty_list(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import entity_from_protobuf + + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID = 1234 + + array_val_pb = entity_pb2.Value(array_value=entity_pb2.ArrayValue(values=[])) + + entity_pb = entity_pb2.Entity(properties={"baz": array_val_pb}) + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb.key._pb.path.add(kind=_KIND, id=_ID) + + entity = entity_from_protobuf(entity_pb._pb) + entity_dict = dict(entity) + assert entity_dict["baz"] == [] + + +def _compare_entity_proto(entity_pb1, entity_pb2): + assert entity_pb1.key == entity_pb2.key + value_list1 = sorted(entity_pb1.properties.items()) + value_list2 = sorted(entity_pb2.properties.items()) + assert len(value_list1) == len(value_list2) + for pair1, pair2 in zip(value_list1, value_list2): + name1, val1 = pair1 + name2, val2 = pair2 + assert name1 == name2 + if val1._pb.HasField("entity_value"): # Message field (Entity) + assert val1.meaning == val2.meaning + _compare_entity_proto(val1.entity_value, val2.entity_value) + else: + 
assert val1 == val2
+
+
+def test_entity_to_protobuf_w_empty():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ entity_pb = entity_to_protobuf(entity)
+ _compare_entity_proto(entity_pb, entity_pb2.Entity())
+
+
+def test_entity_to_protobuf_w_key_only():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import entity_to_protobuf
+ from google.cloud.datastore.key import Key
+
+ kind, name = "PATH", "NAME"
+ project = "PROJECT"
+ key = Key(kind, name, project=project)
+ entity = Entity(key=key)
+ entity_pb = entity_to_protobuf(entity)
+
+ expected_pb = entity_pb2.Entity()
+ expected_pb.key.partition_id.project_id = project
+ path_elt = expected_pb._pb.key.path.add()
+ path_elt.kind = kind
+ path_elt.name = name
+
+ _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_simple_fields():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import _new_value_pb
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ name1 = "foo"
+ entity[name1] = value1 = 42
+ name2 = "bar"
+ entity[name2] = value2 = u"some-string"
+ entity_pb = entity_to_protobuf(entity)
+
+ expected_pb = entity_pb2.Entity()
+ val_pb1 = _new_value_pb(expected_pb, name1)
+ val_pb1.integer_value = value1
+ val_pb2 = _new_value_pb(expected_pb, name2)
+ val_pb2.string_value = value2
+
+ _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_empty_list():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ entity["foo"] = []
+ entity_pb = entity_to_protobuf(entity)
+
+ expected_pb = entity_pb2.Entity()
+ prop = expected_pb._pb.properties.get_or_create("foo")
+ prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[])._pb)
+
+ _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_inverts_to_protobuf():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.helpers import _new_value_pb
+ from google.cloud.datastore.helpers import entity_from_protobuf
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ original_pb = entity_pb2.Entity()
+ # Add a key.
+ original_pb.key.partition_id.project_id = project = "PROJECT"
+ elem1 = original_pb._pb.key.path.add()
+ elem1.kind = "Family"
+ elem1.id = 1234
+ elem2 = original_pb._pb.key.path.add()
+ elem2.kind = "King"
+ elem2.name = "Spades"
+
+ # Add an integer property.
+ val_pb1 = _new_value_pb(original_pb, "foo")
+ val_pb1.integer_value = 1337
+ val_pb1.exclude_from_indexes = True
+ # Add a string property.
+ val_pb2 = _new_value_pb(original_pb, "bar")
+ val_pb2.string_value = u"hello"
+
+ # Add a nested (entity) property.
+ val_pb3 = _new_value_pb(original_pb, "entity-baz")
+ sub_pb = entity_pb2.Entity()
+ sub_val_pb1 = _new_value_pb(sub_pb, "x")
+ sub_val_pb1.double_value = 3.14
+ sub_val_pb2 = _new_value_pb(sub_pb, "y")
+ sub_val_pb2.double_value = 2.718281828
+ val_pb3.meaning = 9
+ val_pb3.entity_value.CopyFrom(sub_pb._pb)
+
+ # Add a list property. 
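+ # (The two blob values below are the UTF-8 encodings of U+2603
+ # SNOWMAN and U+2605 BLACK STAR; any distinct byte strings would do.)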
+ val_pb4 = _new_value_pb(original_pb, "list-quux")
+ array_val1 = val_pb4.array_value.values.add()
+ array_val1.exclude_from_indexes = False
+ array_val1.meaning = meaning = 22
+ array_val1.blob_value = b"\xe2\x98\x83"
+ array_val2 = val_pb4.array_value.values.add()
+ array_val2.exclude_from_indexes = False
+ array_val2.meaning = meaning
+ array_val2.blob_value = b"\xe2\x98\x85"
+
+ # Convert to the user-space Entity.
+ entity = entity_from_protobuf(original_pb)
+ # Convert the user-space Entity back to a protobuf.
+ new_pb = entity_to_protobuf(entity)
+
+ # NOTE: entity_to_protobuf() strips the project so we "cheat".
+ new_pb.key.partition_id.project_id = project
+ _compare_entity_proto(original_pb, new_pb)
+
+
+def test_entity_to_protobuf_w_meaning_with_change():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import _new_value_pb
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ name = "foo"
+ entity[name] = value = 42
+ entity._meanings[name] = (9, 1337)
+ entity_pb = entity_to_protobuf(entity)
+
+ expected_pb = entity_pb2.Entity()
+ value_pb = _new_value_pb(expected_pb, name)
+ value_pb.integer_value = value
+ # NOTE: No meaning is used since the value differs from the
+ # value stored.
+ _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_variable_meanings():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import _new_value_pb
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ name = "quux"
+ entity[name] = values = [1, 20, 300]
+ meaning = 9
+ entity._meanings[name] = ([None, meaning, None], values)
+ entity_pb = entity_to_protobuf(entity)
+
+ # Construct the expected protobuf.
+ expected_pb = entity_pb2.Entity()
+ value_pb = _new_value_pb(expected_pb, name)
+ value0 = value_pb.array_value.values.add()
+ value0.integer_value = values[0]
+ # The only array entry with a meaning is the middle one. 
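+ # (The [None, meaning, None] list stored in entity._meanings maps
+ # element-wise onto the array, so only values[1] carries a meaning.)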
+ value1 = value_pb.array_value.values.add()
+ value1.integer_value = values[1]
+ value1.meaning = meaning
+ value2 = value_pb.array_value.values.add()
+ value2.integer_value = values[2]
+
+ _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_dict_to_entity():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ entity["a"] = {"b": u"c"}
+ entity_pb = entity_to_protobuf(entity)
+
+ expected_pb = entity_pb2.Entity(
+ properties={
+ "a": entity_pb2.Value(
+ entity_value=entity_pb2.Entity(
+ properties={"b": entity_pb2.Value(string_value="c")}
+ )
+ )
+ }
+ )
+ assert entity_pb == expected_pb
+
+
+def test_entity_to_protobuf_w_dict_to_entity_recursive():
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore.entity import Entity
+ from google.cloud.datastore.helpers import entity_to_protobuf
+
+ entity = Entity()
+ entity["a"] = {"b": {"c": {"d": 1.25}, "e": True}, "f": 10}
+ entity_pb = entity_to_protobuf(entity)
+
+ b_entity_pb = entity_pb2.Entity(
+ properties={
+ "c": entity_pb2.Value(
+ entity_value=entity_pb2.Entity(
+ properties={"d": entity_pb2.Value(double_value=1.25)}
)
- }
- )
- self.assertEqual(entity_pb, expected_pb)
-
- def test_dict_to_entity_recursive(self):
- from google.cloud.datastore_v1.types import entity as entity_pb2
- from google.cloud.datastore.entity import Entity
-
- entity = Entity()
- entity["a"] = {"b": {"c": {"d": 1.25}, "e": True}, "f": 10}
- entity_pb = self._call_fut(entity)
-
- b_entity_pb = entity_pb2.Entity(
- properties={
- "c": entity_pb2.Value(
- entity_value=entity_pb2.Entity(
- properties={"d": entity_pb2.Value(double_value=1.25)}
- )
- ),
- "e": entity_pb2.Value(boolean_value=True),
- }
- )
- expected_pb = entity_pb2.Entity(
- properties={
- "a": entity_pb2.Value(
- entity_value=entity_pb2.Entity(
- properties={
- "b": entity_pb2.Value(entity_value=b_entity_pb),
- "f": entity_pb2.Value(integer_value=10),
- }
- )
+ ),
+ "e": entity_pb2.Value(boolean_value=True),
+ }
+ )
+ expected_pb = entity_pb2.Entity(
+ properties={
+ "a": entity_pb2.Value(
+ entity_value=entity_pb2.Entity(
+ properties={
+ "b": entity_pb2.Value(entity_value=b_entity_pb),
+ "f": entity_pb2.Value(integer_value=10),
+ }
)
- }
- )
- self.assertEqual(entity_pb, expected_pb)
-
-
-class Test_key_from_protobuf(unittest.TestCase):
- def _call_fut(self, val):
- from google.cloud.datastore.helpers import key_from_protobuf
-
- return key_from_protobuf(val)
-
- def _makePB(self, project=None, namespace=None, path=()):
- from google.cloud.datastore_v1.types import entity as entity_pb2
-
- pb = entity_pb2.Key()
- if project is not None:
- pb.partition_id.project_id = project
- if namespace is not None:
- pb.partition_id.namespace_id = namespace
- for elem in path:
- added = pb._pb.path.add()
- added.kind = elem["kind"]
- if "id" in elem:
- added.id = elem["id"]
- if "name" in elem:
- added.name = elem["name"]
- return pb
-
- def test_wo_namespace_in_pb(self):
- _PROJECT = "PROJECT"
- pb = self._makePB(path=[{"kind": "KIND"}], project=_PROJECT)
- key = self._call_fut(pb)
- self.assertEqual(key.project, _PROJECT)
- self.assertIsNone(key.namespace)
-
- def test_w_namespace_in_pb(self):
- _PROJECT = "PROJECT"
- _NAMESPACE = "NAMESPACE"
- pb = self._makePB(
- path=[{"kind": "KIND"}], namespace=_NAMESPACE, project=_PROJECT
- )
- key = self._call_fut(pb)
- self.assertEqual(key.project, 
_PROJECT) - self.assertEqual(key.namespace, _NAMESPACE) - - def test_w_nested_path_in_pb(self): - _PATH = [ - {"kind": "PARENT", "name": "NAME"}, - {"kind": "CHILD", "id": 1234}, - {"kind": "GRANDCHILD", "id": 5678}, - ] - pb = self._makePB(path=_PATH, project="PROJECT") - key = self._call_fut(pb) - self.assertEqual(key.path, _PATH) - - def test_w_nothing_in_pb(self): - pb = self._makePB() - self.assertRaises(ValueError, self._call_fut, pb) - - -class Test__get_read_options(unittest.TestCase): - def _call_fut(self, eventual, transaction_id): - from google.cloud.datastore.helpers import get_read_options - - return get_read_options(eventual, transaction_id) - - def test_eventual_w_transaction(self): - with self.assertRaises(ValueError): - self._call_fut(True, b"123") - - def test_eventual_wo_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - read_options = self._call_fut(True, None) - expected = datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - ) - self.assertEqual(read_options, expected) - - def test_default_w_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - txn_id = b"123abc-easy-as" - read_options = self._call_fut(False, txn_id) - expected = datastore_pb2.ReadOptions(transaction=txn_id) - self.assertEqual(read_options, expected) - - def test_default_wo_transaction(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - read_options = self._call_fut(False, None) - expected = datastore_pb2.ReadOptions() - self.assertEqual(read_options, expected) - - -class Test__pb_attr_value(unittest.TestCase): - def _call_fut(self, val): - from google.cloud.datastore.helpers import _pb_attr_value - - return _pb_attr_value(val) - - def test_datetime_naive(self): - import calendar - import datetime - from google.cloud._helpers import UTC - - micros = 4375 - naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros) # No zone. 
- utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - name, value = self._call_fut(naive) - self.assertEqual(name, "timestamp_value") - self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) - self.assertEqual(value.nanos, 1000 * micros) - - def test_datetime_w_zone(self): - import calendar - import datetime - from google.cloud._helpers import UTC - - micros = 4375 - utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - name, value = self._call_fut(utc) - self.assertEqual(name, "timestamp_value") - self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) - self.assertEqual(value.nanos, 1000 * micros) - - def test_key(self): - from google.cloud.datastore.key import Key - - key = Key("PATH", 1234, project="PROJECT") - name, value = self._call_fut(key) - self.assertEqual(name, "key_value") - self.assertEqual(value, key.to_protobuf()) - - def test_bool(self): - name, value = self._call_fut(False) - self.assertEqual(name, "boolean_value") - self.assertEqual(value, False) - - def test_float(self): - name, value = self._call_fut(3.1415926) - self.assertEqual(name, "double_value") - self.assertEqual(value, 3.1415926) - - def test_int(self): - name, value = self._call_fut(42) - self.assertEqual(name, "integer_value") - self.assertEqual(value, 42) - - def test_long(self): - must_be_long = (1 << 63) - 1 - name, value = self._call_fut(must_be_long) - self.assertEqual(name, "integer_value") - self.assertEqual(value, must_be_long) - - def test_native_str(self): - name, value = self._call_fut("str") - - self.assertEqual(name, "string_value") - self.assertEqual(value, "str") - - def test_bytes(self): - name, value = self._call_fut(b"bytes") - self.assertEqual(name, "blob_value") - self.assertEqual(value, b"bytes") - - def test_unicode(self): - name, value = self._call_fut(u"str") - self.assertEqual(name, "string_value") - self.assertEqual(value, u"str") - - def test_entity(self): - from google.cloud.datastore.entity import Entity - - entity = Entity() - name, value = self._call_fut(entity) - self.assertEqual(name, "entity_value") - self.assertIs(value, entity) - - def test_dict(self): - from google.cloud.datastore.entity import Entity - - orig_value = {"richard": b"feynman"} - name, value = self._call_fut(orig_value) - self.assertEqual(name, "entity_value") - self.assertIsInstance(value, Entity) - self.assertIsNone(value.key) - self.assertEqual(value._meanings, {}) - self.assertEqual(value.exclude_from_indexes, set()) - self.assertEqual(dict(value), orig_value) - - def test_array(self): - values = ["a", 0, 3.14] - name, value = self._call_fut(values) - self.assertEqual(name, "array_value") - self.assertIs(value, values) - - def test_geo_point(self): - from google.type import latlng_pb2 - from google.cloud.datastore.helpers import GeoPoint - - lat = 42.42 - lng = 99.0007 - geo_pt = GeoPoint(latitude=lat, longitude=lng) - geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - name, value = self._call_fut(geo_pt) - self.assertEqual(name, "geo_point_value") - self.assertEqual(value, geo_pt_pb) - - def test_null(self): - from google.protobuf import struct_pb2 - - name, value = self._call_fut(None) - self.assertEqual(name, "null_value") - self.assertEqual(value, struct_pb2.NULL_VALUE) - - def test_object(self): - self.assertRaises(ValueError, self._call_fut, object()) - - -class Test__get_value_from_value_pb(unittest.TestCase): - def _call_fut(self, pb): - from google.cloud.datastore.helpers import _get_value_from_value_pb - - return _get_value_from_value_pb(pb) - 
- def _makePB(self, attr_name, attr_value): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value = entity_pb2.Value() - setattr(value._pb, attr_name, attr_value) - return value - - def test_datetime(self): - import calendar - import datetime - from google.cloud._helpers import UTC - from google.cloud.datastore_v1.types import entity as entity_pb2 - - micros = 4375 - utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - value = entity_pb2.Value() - value._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) - value._pb.timestamp_value.nanos = 1000 * micros - self.assertEqual(self._call_fut(value._pb), utc) - - def test_key(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.key import Key - - value = entity_pb2.Value() - expected = Key("KIND", 1234, project="PROJECT").to_protobuf() - value.key_value._pb.CopyFrom(expected._pb) - found = self._call_fut(value._pb) - self.assertEqual(found.to_protobuf(), expected) - - def test_bool(self): - value = self._makePB("boolean_value", False) - self.assertEqual(self._call_fut(value._pb), False) - - def test_float(self): - value = self._makePB("double_value", 3.1415926) - self.assertEqual(self._call_fut(value._pb), 3.1415926) - - def test_int(self): - value = self._makePB("integer_value", 42) - self.assertEqual(self._call_fut(value._pb), 42) - - def test_bytes(self): - value = self._makePB("blob_value", b"str") - self.assertEqual(self._call_fut(value._pb), b"str") - - def test_unicode(self): - value = self._makePB("string_value", u"str") - self.assertEqual(self._call_fut(value._pb), u"str") - - def test_entity(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.helpers import _new_value_pb - - value = entity_pb2.Value() - entity_pb = value.entity_value - entity_pb._pb.key.path.add(kind="KIND") - entity_pb.key.partition_id.project_id = "PROJECT" - - value_pb = _new_value_pb(entity_pb, "foo") - value_pb.string_value = "Foo" - entity = self._call_fut(value._pb) - self.assertIsInstance(entity, Entity) - self.assertEqual(entity["foo"], "Foo") - - def test_array(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value = entity_pb2.Value() - array_pb = value.array_value.values - item_pb = array_pb._pb.add() - item_pb.string_value = "Foo" - item_pb = array_pb._pb.add() - item_pb.string_value = "Bar" - items = self._call_fut(value._pb) - self.assertEqual(items, ["Foo", "Bar"]) - - def test_geo_point(self): - from google.type import latlng_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore.helpers import GeoPoint - - lat = -3.14 - lng = 13.37 - geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - value = entity_pb2.Value(geo_point_value=geo_pt_pb) - result = self._call_fut(value._pb) - self.assertIsInstance(result, GeoPoint) - self.assertEqual(result.latitude, lat) - self.assertEqual(result.longitude, lng) - - def test_null(self): - from google.protobuf import struct_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) - result = self._call_fut(value._pb) - self.assertIsNone(result) - - def test_unknown(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value = entity_pb2.Value() - with self.assertRaises(ValueError): - self._call_fut(value._pb) - - -class 
Test_set_protobuf_value(unittest.TestCase): - def _call_fut(self, value_pb, val): - from google.cloud.datastore.helpers import _set_protobuf_value - - return _set_protobuf_value(value_pb, val) - - def _makePB(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - return entity_pb2.Value()._pb - - def test_datetime(self): - import calendar - import datetime - from google.cloud._helpers import UTC - - pb = self._makePB() - micros = 4375 - utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) - self._call_fut(pb, utc) - value = pb.timestamp_value - self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) - self.assertEqual(value.nanos, 1000 * micros) - - def test_key(self): - from google.cloud.datastore.key import Key - - pb = self._makePB() - key = Key("KIND", 1234, project="PROJECT") - self._call_fut(pb, key) - value = pb.key_value - self.assertEqual(value, key.to_protobuf()._pb) - - def test_none(self): - pb = self._makePB() - self._call_fut(pb, None) - self.assertEqual(pb.WhichOneof("value_type"), "null_value") - - def test_bool(self): - pb = self._makePB() - self._call_fut(pb, False) - value = pb.boolean_value - self.assertEqual(value, False) - - def test_float(self): - pb = self._makePB() - self._call_fut(pb, 3.1415926) - value = pb.double_value - self.assertEqual(value, 3.1415926) - - def test_int(self): - pb = self._makePB() - self._call_fut(pb, 42) - value = pb.integer_value - self.assertEqual(value, 42) - - def test_long(self): - pb = self._makePB() - must_be_long = (1 << 63) - 1 - self._call_fut(pb, must_be_long) - value = pb.integer_value - self.assertEqual(value, must_be_long) - - def test_native_str(self): - pb = self._makePB() - self._call_fut(pb, "str") - - value = pb.string_value - self.assertEqual(value, "str") - - def test_bytes(self): - pb = self._makePB() - self._call_fut(pb, b"str") - value = pb.blob_value - self.assertEqual(value, b"str") - - def test_unicode(self): - pb = self._makePB() - self._call_fut(pb, u"str") - value = pb.string_value - self.assertEqual(value, u"str") - - def test_entity_empty_wo_key(self): - from google.cloud.datastore.entity import Entity - - pb = self._makePB() - entity = Entity() - self._call_fut(pb, entity) - value = pb.entity_value - self.assertEqual(value.key.SerializeToString(), b"") - self.assertEqual(len(list(value.properties.items())), 0) - - def test_entity_w_key(self): - from google.cloud.datastore.entity import Entity - from google.cloud.datastore.key import Key - - name = "foo" - value = u"Foo" - pb = self._makePB() - key = Key("KIND", 123, project="PROJECT") - entity = Entity(key=key) - entity[name] = value - self._call_fut(pb, entity) - entity_pb = pb.entity_value - self.assertEqual(entity_pb.key, key.to_protobuf()._pb) - - prop_dict = dict(entity_pb.properties.items()) - self.assertEqual(len(prop_dict), 1) - self.assertEqual(list(prop_dict.keys()), [name]) - self.assertEqual(prop_dict[name].string_value, value) - - def test_array(self): - pb = self._makePB() - values = [u"a", 0, 3.14] - self._call_fut(pb, values) - marshalled = pb.array_value.values - self.assertEqual(len(marshalled), len(values)) - self.assertEqual(marshalled[0].string_value, values[0]) - self.assertEqual(marshalled[1].integer_value, values[1]) - self.assertEqual(marshalled[2].double_value, values[2]) - - def test_geo_point(self): - from google.type import latlng_pb2 - from google.cloud.datastore.helpers import GeoPoint - - pb = self._makePB() - lat = 9.11 - lng = 3.337 - geo_pt = GeoPoint(latitude=lat, longitude=lng) - 
geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - self._call_fut(pb, geo_pt) - self.assertEqual(pb.geo_point_value, geo_pt_pb) - - -class Test__get_meaning(unittest.TestCase): - def _call_fut(self, *args, **kwargs): - from google.cloud.datastore.helpers import _get_meaning - - return _get_meaning(*args, **kwargs) - - def test_no_meaning(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value_pb = entity_pb2.Value() - result = self._call_fut(value_pb) - self.assertIsNone(result) - - def test_single(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value_pb = entity_pb2.Value() - value_pb.meaning = meaning = 22 - value_pb.string_value = u"hi" - result = self._call_fut(value_pb) - self.assertEqual(meaning, result) - - def test_empty_array_value(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value_pb = entity_pb2.Value() - value_pb._pb.array_value.values.add() - value_pb._pb.array_value.values.pop() - - result = self._call_fut(value_pb, is_list=True) - self.assertEqual(None, result) - - def test_array_value(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value_pb = entity_pb2.Value() - meaning = 9 - sub_value_pb1 = value_pb._pb.array_value.values.add() - sub_value_pb2 = value_pb._pb.array_value.values.add() - - sub_value_pb1.meaning = sub_value_pb2.meaning = meaning - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" - - result = self._call_fut(value_pb, is_list=True) - self.assertEqual(meaning, result) - - def test_array_value_multiple_meanings(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value_pb = entity_pb2.Value() - meaning1 = 9 - meaning2 = 10 - sub_value_pb1 = value_pb._pb.array_value.values.add() - sub_value_pb2 = value_pb._pb.array_value.values.add() - - sub_value_pb1.meaning = meaning1 - sub_value_pb2.meaning = meaning2 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" - - result = self._call_fut(value_pb, is_list=True) - self.assertEqual(result, [meaning1, meaning2]) - - def test_array_value_meaning_partially_unset(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - value_pb = entity_pb2.Value() - meaning1 = 9 - sub_value_pb1 = value_pb._pb.array_value.values.add() - sub_value_pb2 = value_pb._pb.array_value.values.add() - - sub_value_pb1.meaning = meaning1 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" - - result = self._call_fut(value_pb, is_list=True) - self.assertEqual(result, [meaning1, None]) - - -class TestGeoPoint(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.datastore.helpers import GeoPoint - - return GeoPoint - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - lat = 81.2 - lng = 359.9999 - geo_pt = self._make_one(lat, lng) - self.assertEqual(geo_pt.latitude, lat) - self.assertEqual(geo_pt.longitude, lng) - - def test_to_protobuf(self): - from google.type import latlng_pb2 - - lat = 0.0001 - lng = 20.03 - geo_pt = self._make_one(lat, lng) - result = geo_pt.to_protobuf() - geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - self.assertEqual(result, geo_pt_pb) - - def test___eq__(self): - lat = 0.0001 - lng = 20.03 - geo_pt1 = self._make_one(lat, lng) - geo_pt2 = self._make_one(lat, lng) - self.assertEqual(geo_pt1, geo_pt2) - - def test___eq__type_differ(self): - lat = 0.0001 - lng = 20.03 - geo_pt1 
= self._make_one(lat, lng) - geo_pt2 = object() - self.assertNotEqual(geo_pt1, geo_pt2) - - def test___ne__same_value(self): - lat = 0.0001 - lng = 20.03 - geo_pt1 = self._make_one(lat, lng) - geo_pt2 = self._make_one(lat, lng) - comparison_val = geo_pt1 != geo_pt2 - self.assertFalse(comparison_val) - - def test___ne__(self): - geo_pt1 = self._make_one(0.0, 1.0) - geo_pt2 = self._make_one(2.0, 3.0) - self.assertNotEqual(geo_pt1, geo_pt2) + ) + } + ) + assert entity_pb == expected_pb + + +def _make_key_pb(project=None, namespace=None, path=()): + from google.cloud.datastore_v1.types import entity as entity_pb2 + + pb = entity_pb2.Key() + if project is not None: + pb.partition_id.project_id = project + if namespace is not None: + pb.partition_id.namespace_id = namespace + for elem in path: + added = pb._pb.path.add() + added.kind = elem["kind"] + if "id" in elem: + added.id = elem["id"] + if "name" in elem: + added.name = elem["name"] + return pb + + +def test_key_from_protobuf_wo_namespace_in_pb(): + from google.cloud.datastore.helpers import key_from_protobuf + + _PROJECT = "PROJECT" + pb = _make_key_pb(path=[{"kind": "KIND"}], project=_PROJECT) + key = key_from_protobuf(pb) + assert key.project == _PROJECT + assert key.namespace is None + + +def test_key_from_protobuf_w_namespace_in_pb(): + from google.cloud.datastore.helpers import key_from_protobuf + + _PROJECT = "PROJECT" + _NAMESPACE = "NAMESPACE" + pb = _make_key_pb(path=[{"kind": "KIND"}], namespace=_NAMESPACE, project=_PROJECT) + key = key_from_protobuf(pb) + assert key.project == _PROJECT + assert key.namespace == _NAMESPACE + + +def test_key_from_protobuf_w_nested_path_in_pb(): + from google.cloud.datastore.helpers import key_from_protobuf + + _PATH = [ + {"kind": "PARENT", "name": "NAME"}, + {"kind": "CHILD", "id": 1234}, + {"kind": "GRANDCHILD", "id": 5678}, + ] + pb = _make_key_pb(path=_PATH, project="PROJECT") + key = key_from_protobuf(pb) + assert key.path == _PATH + + +def test_w_nothing_in_pb(): + from google.cloud.datastore.helpers import key_from_protobuf + + pb = _make_key_pb() + with pytest.raises(ValueError): + key_from_protobuf(pb) + + +def test__get_read_options_w_eventual_w_txn(): + from google.cloud.datastore.helpers import get_read_options + + with pytest.raises(ValueError): + get_read_options(True, b"123") + + +def test__get_read_options_w_eventual_wo_txn(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.helpers import get_read_options + + read_options = get_read_options(True, None) + expected = datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + ) + assert read_options == expected + + +def test__get_read_options_w_default_w_txn(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.helpers import get_read_options + + txn_id = b"123abc-easy-as" + read_options = get_read_options(False, txn_id) + expected = datastore_pb2.ReadOptions(transaction=txn_id) + assert read_options == expected + + +def test__get_read_options_w_default_wo_txn(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.helpers import get_read_options + + read_options = get_read_options(False, None) + expected = datastore_pb2.ReadOptions() + assert read_options == expected + + +def test__pb_attr_value_w_datetime_naive(): + import calendar + import datetime + from google.cloud._helpers import UTC + from google.cloud.datastore.helpers import 
_pb_attr_value + + micros = 4375 + naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros) # No zone. + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + name, value = _pb_attr_value(naive) + assert name == "timestamp_value" + assert value.seconds == calendar.timegm(utc.timetuple()) + assert value.nanos == 1000 * micros + + +def test__pb_attr_value_w_datetime_w_zone(): + import calendar + import datetime + from google.cloud._helpers import UTC + from google.cloud.datastore.helpers import _pb_attr_value + + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + name, value = _pb_attr_value(utc) + assert name == "timestamp_value" + assert value.seconds == calendar.timegm(utc.timetuple()) + assert value.nanos == 1000 * micros + + +def test__pb_attr_value_w_key(): + from google.cloud.datastore.key import Key + from google.cloud.datastore.helpers import _pb_attr_value + + key = Key("PATH", 1234, project="PROJECT") + name, value = _pb_attr_value(key) + assert name == "key_value" + assert value == key.to_protobuf() + + +def test__pb_attr_value_w_bool(): + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value(False) + assert name == "boolean_value" + assert not value + + +def test__pb_attr_value_w_float(): + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value(3.1415926) + assert name == "double_value" + assert value == 3.1415926 + + +def test__pb_attr_value_w_int(): + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value(42) + assert name == "integer_value" + assert value == 42 + + +def test__pb_attr_value_w_long(): + from google.cloud.datastore.helpers import _pb_attr_value + + must_be_long = (1 << 63) - 1 + name, value = _pb_attr_value(must_be_long) + assert name == "integer_value" + assert value == must_be_long + + +def test__pb_attr_value_w_native_str(): + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value("str") + + assert name == "string_value" + assert value == "str" + + +def test__pb_attr_value_w_bytes(): + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value(b"bytes") + assert name == "blob_value" + assert value == b"bytes" + + +def test__pb_attr_value_w_unicode(): + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value(u"str") + assert name == "string_value" + assert value == u"str" + + +def test__pb_attr_value_w_entity(): + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _pb_attr_value + + entity = Entity() + name, value = _pb_attr_value(entity) + assert name == "entity_value" + assert value is entity + + +def test__pb_attr_value_w_dict(): + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _pb_attr_value + + orig_value = {"richard": b"feynman"} + name, value = _pb_attr_value(orig_value) + assert name == "entity_value" + assert isinstance(value, Entity) + assert value.key is None + assert value._meanings == {} + assert value.exclude_from_indexes == set() + assert dict(value) == orig_value + + +def test__pb_attr_value_w_array(): + from google.cloud.datastore.helpers import _pb_attr_value + + values = ["a", 0, 3.14] + name, value = _pb_attr_value(values) + assert name == "array_value" + assert value is values + + +def test__pb_attr_value_w_geo_point(): + from google.type import latlng_pb2 + from google.cloud.datastore.helpers import 
GeoPoint + from google.cloud.datastore.helpers import _pb_attr_value + + lat = 42.42 + lng = 99.0007 + geo_pt = GeoPoint(latitude=lat, longitude=lng) + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + name, value = _pb_attr_value(geo_pt) + assert name == "geo_point_value" + assert value == geo_pt_pb + + +def test__pb_attr_value_w_null(): + from google.protobuf import struct_pb2 + from google.cloud.datastore.helpers import _pb_attr_value + + name, value = _pb_attr_value(None) + assert name == "null_value" + assert value == struct_pb2.NULL_VALUE + + +def test__pb_attr_value_w_object(): + from google.cloud.datastore.helpers import _pb_attr_value + + with pytest.raises(ValueError): + _pb_attr_value(object()) + + +def _make_value_pb(attr_name, attr_value): + from google.cloud.datastore_v1.types import entity as entity_pb2 + + value = entity_pb2.Value() + setattr(value._pb, attr_name, attr_value) + return value + + +def test__get_value_from_value_pb_w_datetime(): + import calendar + import datetime + from google.cloud._helpers import UTC + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_value_from_value_pb + + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + value = entity_pb2.Value() + value._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) + value._pb.timestamp_value.nanos = 1000 * micros + assert _get_value_from_value_pb(value._pb) == utc + + +def test__get_value_from_value_pb_w_key(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.key import Key + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = entity_pb2.Value() + expected = Key("KIND", 1234, project="PROJECT").to_protobuf() + value.key_value._pb.CopyFrom(expected._pb) + found = _get_value_from_value_pb(value._pb) + assert found.to_protobuf() == expected + + +def test__get_value_from_value_pb_w_bool(): + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = _make_value_pb("boolean_value", False) + assert not _get_value_from_value_pb(value._pb) + + +def test__get_value_from_value_pb_w_float(): + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = _make_value_pb("double_value", 3.1415926) + assert _get_value_from_value_pb(value._pb) == 3.1415926 + + +def test__get_value_from_value_pb_w_int(): + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = _make_value_pb("integer_value", 42) + assert _get_value_from_value_pb(value._pb) == 42 + + +def test__get_value_from_value_pb_w_bytes(): + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = _make_value_pb("blob_value", b"str") + assert _get_value_from_value_pb(value._pb) == b"str" + + +def test__get_value_from_value_pb_w_unicode(): + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = _make_value_pb("string_value", u"str") + assert _get_value_from_value_pb(value._pb) == u"str" + + +def test__get_value_from_value_pb_w_entity(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _new_value_pb + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = entity_pb2.Value() + entity_pb = value.entity_value + entity_pb._pb.key.path.add(kind="KIND") + entity_pb.key.partition_id.project_id = "PROJECT" + + value_pb = _new_value_pb(entity_pb, 
"foo") + value_pb.string_value = "Foo" + entity = _get_value_from_value_pb(value._pb) + assert isinstance(entity, Entity) + assert entity["foo"] == "Foo" + + +def test__get_value_from_value_pb_w_array(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = entity_pb2.Value() + array_pb = value.array_value.values + item_pb = array_pb._pb.add() + item_pb.string_value = "Foo" + item_pb = array_pb._pb.add() + item_pb.string_value = "Bar" + items = _get_value_from_value_pb(value._pb) + assert items == ["Foo", "Bar"] + + +def test__get_value_from_value_pb_w_geo_point(): + from google.type import latlng_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import GeoPoint + from google.cloud.datastore.helpers import _get_value_from_value_pb + + lat = -3.14 + lng = 13.37 + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + value = entity_pb2.Value(geo_point_value=geo_pt_pb) + result = _get_value_from_value_pb(value._pb) + assert isinstance(result, GeoPoint) + assert result.latitude == lat + assert result.longitude == lng + + +def test__get_value_from_value_pb_w_null(): + from google.protobuf import struct_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) + result = _get_value_from_value_pb(value._pb) + assert result is None + + +def test__get_value_from_value_pb_w_unknown(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_value_from_value_pb + + value = entity_pb2.Value() + with pytest.raises(ValueError): + _get_value_from_value_pb(value._pb) + + +def _make_empty_value_pb(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + + return entity_pb2.Value()._pb + + +def test__set_protobuf_value_w_datetime(): + import calendar + import datetime + from google.cloud._helpers import UTC + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + _set_protobuf_value(pb, utc) + value = pb.timestamp_value + assert value.seconds == calendar.timegm(utc.timetuple()) + assert value.nanos == 1000 * micros + + +def test__set_protobuf_value_w_key(): + from google.cloud.datastore.key import Key + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + key = Key("KIND", 1234, project="PROJECT") + _set_protobuf_value(pb, key) + value = pb.key_value + assert value == key.to_protobuf()._pb + + +def test__set_protobuf_value_w_none(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + _set_protobuf_value(pb, None) + assert pb.WhichOneof("value_type") == "null_value" + + +def test__set_protobuf_value_w_bool(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + _set_protobuf_value(pb, False) + value = pb.boolean_value + assert not value + + +def test__set_protobuf_value_w_float(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + _set_protobuf_value(pb, 3.1415926) + value = pb.double_value + assert value == 3.1415926 + + +def test__set_protobuf_value_w_int(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = 
_make_empty_value_pb() + _set_protobuf_value(pb, 42) + value = pb.integer_value + assert value == 42 + + +def test__set_protobuf_value_w_long(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + must_be_long = (1 << 63) - 1 + _set_protobuf_value(pb, must_be_long) + value = pb.integer_value + assert value == must_be_long + + +def test__set_protobuf_value_w_native_str(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + _set_protobuf_value(pb, "str") + + value = pb.string_value + assert value == "str" + + +def test__set_protobuf_value_w_bytes(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + _set_protobuf_value(pb, b"str") + value = pb.blob_value + assert value == b"str" + + +def test__set_protobuf_value_w_unicode(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + _set_protobuf_value(pb, u"str") + value = pb.string_value + assert value == u"str" + + +def test__set_protobuf_value_w_entity_empty_wo_key(): + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + entity = Entity() + _set_protobuf_value(pb, entity) + value = pb.entity_value + assert value.key.SerializeToString() == b"" + assert len(list(value.properties.items())) == 0 + + +def test__set_protobuf_value_w_entity_w_key(): + from google.cloud.datastore.entity import Entity + from google.cloud.datastore.key import Key + from google.cloud.datastore.helpers import _set_protobuf_value + + name = "foo" + value = u"Foo" + pb = _make_empty_value_pb() + key = Key("KIND", 123, project="PROJECT") + entity = Entity(key=key) + entity[name] = value + _set_protobuf_value(pb, entity) + entity_pb = pb.entity_value + assert entity_pb.key == key.to_protobuf()._pb + + prop_dict = dict(entity_pb.properties.items()) + assert len(prop_dict) == 1 + assert list(prop_dict.keys()) == [name] + assert prop_dict[name].string_value == value + + +def test__set_protobuf_value_w_array(): + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + values = [u"a", 0, 3.14] + _set_protobuf_value(pb, values) + marshalled = pb.array_value.values + assert len(marshalled) == len(values) + assert marshalled[0].string_value == values[0] + assert marshalled[1].integer_value == values[1] + assert marshalled[2].double_value == values[2] + + +def test__set_protobuf_value_w_geo_point(): + from google.type import latlng_pb2 + from google.cloud.datastore.helpers import GeoPoint + from google.cloud.datastore.helpers import _set_protobuf_value + + pb = _make_empty_value_pb() + lat = 9.11 + lng = 3.337 + geo_pt = GeoPoint(latitude=lat, longitude=lng) + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + _set_protobuf_value(pb, geo_pt) + assert pb.geo_point_value == geo_pt_pb + + +def test__get_meaning_w_no_meaning(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + result = _get_meaning(value_pb) + assert result is None + + +def test__get_meaning_w_single(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + value_pb.meaning = meaning = 22 + value_pb.string_value = u"hi" + result = _get_meaning(value_pb) + assert meaning == result 
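+    # For a scalar value, ``_get_meaning`` simply surfaces the protobuf's
+    # ``meaning`` field; the array-valued cases below exercise how per-item
+    # meanings are collapsed (uniform) or returned as a list (mixed).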
+ + +def test__get_meaning_w_empty_array_value(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + value_pb._pb.array_value.values.add() + value_pb._pb.array_value.values.pop() + + result = _get_meaning(value_pb, is_list=True) + assert result is None + + +def test__get_meaning_w_array_value(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + meaning = 9 + sub_value_pb1 = value_pb._pb.array_value.values.add() + sub_value_pb2 = value_pb._pb.array_value.values.add() + + sub_value_pb1.meaning = sub_value_pb2.meaning = meaning + sub_value_pb1.string_value = u"hi" + sub_value_pb2.string_value = u"bye" + + result = _get_meaning(value_pb, is_list=True) + assert meaning == result + + +def test__get_meaning_w_array_value_multiple_meanings(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + meaning1 = 9 + meaning2 = 10 + sub_value_pb1 = value_pb._pb.array_value.values.add() + sub_value_pb2 = value_pb._pb.array_value.values.add() + + sub_value_pb1.meaning = meaning1 + sub_value_pb2.meaning = meaning2 + sub_value_pb1.string_value = u"hi" + sub_value_pb2.string_value = u"bye" + + result = _get_meaning(value_pb, is_list=True) + assert result == [meaning1, meaning2] + + +def test__get_meaning_w_array_value_meaning_partially_unset(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + meaning1 = 9 + sub_value_pb1 = value_pb._pb.array_value.values.add() + sub_value_pb2 = value_pb._pb.array_value.values.add() + + sub_value_pb1.meaning = meaning1 + sub_value_pb1.string_value = u"hi" + sub_value_pb2.string_value = u"bye" + + result = _get_meaning(value_pb, is_list=True) + assert result == [meaning1, None] + + +def _make_geopoint(*args, **kwargs): + from google.cloud.datastore.helpers import GeoPoint + + return GeoPoint(*args, **kwargs) + + +def test_geopoint_ctor(): + lat = 81.2 + lng = 359.9999 + geo_pt = _make_geopoint(lat, lng) + assert geo_pt.latitude == lat + assert geo_pt.longitude == lng + + +def test_geopoint_to_protobuf(): + from google.type import latlng_pb2 + + lat = 0.0001 + lng = 20.03 + geo_pt = _make_geopoint(lat, lng) + result = geo_pt.to_protobuf() + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + assert result == geo_pt_pb + + +def test_geopoint___eq__(): + lat = 0.0001 + lng = 20.03 + geo_pt1 = _make_geopoint(lat, lng) + geo_pt2 = _make_geopoint(lat, lng) + assert geo_pt1 == geo_pt2 + + +def test_geopoint___eq__type_differ(): + lat = 0.0001 + lng = 20.03 + geo_pt1 = _make_geopoint(lat, lng) + geo_pt2 = object() + assert geo_pt1 != geo_pt2 + + +def test_geopoint___ne__same_value(): + lat = 0.0001 + lng = 20.03 + geo_pt1 = _make_geopoint(lat, lng) + geo_pt2 = _make_geopoint(lat, lng) + assert not geo_pt1 != geo_pt2 + + +def test_geopoint___ne__(): + geo_pt1 = _make_geopoint(0.0, 1.0) + geo_pt2 = _make_geopoint(2.0, 3.0) + assert geo_pt1 != geo_pt2 diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index 9d130fb49ddb..2d2a88e76035 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -12,735 
+12,772 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest - - -class TestKey(unittest.TestCase): - - _DEFAULT_PROJECT = "PROJECT" - # NOTE: This comes directly from a running (in the dev appserver) - # App Engine app. Created via: - # - # from google.appengine.ext import ndb - # key = ndb.Key( - # 'Parent', 59, 'Child', 'Feather', - # namespace='space', app='s~sample-app') - # urlsafe = key.urlsafe() - _URLSAFE_EXAMPLE1 = ( - b"agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ" b"WF0aGVyDKIBBXNwYWNl" +import pytest + + +_DEFAULT_PROJECT = "PROJECT" +PROJECT = "my-prahjekt" +# NOTE: This comes directly from a running (in the dev appserver) +# App Engine app. Created via: +# +# from google.appengine.ext import ndb +# key = ndb.Key( +# 'Parent', 59, 'Child', 'Feather', +# namespace='space', app='s~sample-app') +# urlsafe = key.urlsafe() +_URLSAFE_EXAMPLE1 = ( + b"agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ" b"WF0aGVyDKIBBXNwYWNl" +) +_URLSAFE_APP1 = "s~sample-app" +_URLSAFE_NAMESPACE1 = "space" +_URLSAFE_FLAT_PATH1 = ("Parent", 59, "Child", "Feather") +_URLSAFE_EXAMPLE2 = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA" +_URLSAFE_APP2 = "s~fire" +_URLSAFE_FLAT_PATH2 = ("Kind", "Thing") +_URLSAFE_EXAMPLE3 = b"ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw" +_URLSAFE_APP3 = "sample-app-no-location" +_URLSAFE_FLAT_PATH3 = ("Zorp", 88) + + +def _make_key(*args, **kwargs): + from google.cloud.datastore.key import Key + + return Key(*args, **kwargs) + + +def test_key_ctor_empty(): + with pytest.raises(ValueError): + _make_key() + + +def test_key_ctor_no_project(): + with pytest.raises(ValueError): + _make_key("KIND") + + +def test_key_ctor_w_explicit_project_empty_path(): + with pytest.raises(ValueError): + _make_key(project=PROJECT) + + +def test_key_ctor_parent(): + _PARENT_KIND = "KIND1" + _PARENT_ID = 1234 + _PARENT_PROJECT = "PROJECT-ALT" + _PARENT_NAMESPACE = "NAMESPACE" + _CHILD_KIND = "KIND2" + _CHILD_ID = 2345 + _PATH = [ + {"kind": _PARENT_KIND, "id": _PARENT_ID}, + {"kind": _CHILD_KIND, "id": _CHILD_ID}, + ] + parent_key = _make_key( + _PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, namespace=_PARENT_NAMESPACE, ) - _URLSAFE_APP1 = "s~sample-app" - _URLSAFE_NAMESPACE1 = "space" - _URLSAFE_FLAT_PATH1 = ("Parent", 59, "Child", "Feather") - _URLSAFE_EXAMPLE2 = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA" - _URLSAFE_APP2 = "s~fire" - _URLSAFE_FLAT_PATH2 = ("Kind", "Thing") - _URLSAFE_EXAMPLE3 = b"ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw" - _URLSAFE_APP3 = "sample-app-no-location" - _URLSAFE_FLAT_PATH3 = ("Zorp", 88) - - @staticmethod - def _get_target_class(): - from google.cloud.datastore.key import Key - - return Key - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_ctor_empty(self): - self.assertRaises(ValueError, self._make_one) - - def test_ctor_no_project(self): - klass = self._get_target_class() - self.assertRaises(ValueError, klass, "KIND") - - def test_ctor_w_explicit_project_empty_path(self): - _PROJECT = "PROJECT" - self.assertRaises(ValueError, self._make_one, project=_PROJECT) - - def test_ctor_parent(self): - _PARENT_KIND = "KIND1" - _PARENT_ID = 1234 - _PARENT_PROJECT = "PROJECT-ALT" - _PARENT_NAMESPACE = "NAMESPACE" - _CHILD_KIND = "KIND2" - _CHILD_ID = 2345 - _PATH = [ - {"kind": _PARENT_KIND, "id": _PARENT_ID}, - {"kind": _CHILD_KIND, "id": _CHILD_ID}, - ] - parent_key = self._make_one( - _PARENT_KIND, - _PARENT_ID, - 
project=_PARENT_PROJECT, - namespace=_PARENT_NAMESPACE, - ) - key = self._make_one(_CHILD_KIND, _CHILD_ID, parent=parent_key) - self.assertEqual(key.project, parent_key.project) - self.assertEqual(key.namespace, parent_key.namespace) - self.assertEqual(key.kind, _CHILD_KIND) - self.assertEqual(key.path, _PATH) - self.assertIs(key.parent, parent_key) - - def test_ctor_partial_parent(self): - parent_key = self._make_one("KIND", project=self._DEFAULT_PROJECT) - with self.assertRaises(ValueError): - self._make_one("KIND2", 1234, parent=parent_key) - - def test_ctor_parent_bad_type(self): - with self.assertRaises(AttributeError): - self._make_one( - "KIND2", 1234, parent=("KIND1", 1234), project=self._DEFAULT_PROJECT - ) - - def test_ctor_parent_bad_namespace(self): - parent_key = self._make_one( - "KIND", 1234, namespace="FOO", project=self._DEFAULT_PROJECT - ) - with self.assertRaises(ValueError): - self._make_one( - "KIND2", - 1234, - namespace="BAR", - parent=parent_key, - PROJECT=self._DEFAULT_PROJECT, - ) - - def test_ctor_parent_bad_project(self): - parent_key = self._make_one("KIND", 1234, project="FOO") - with self.assertRaises(ValueError): - self._make_one("KIND2", 1234, parent=parent_key, project="BAR") - - def test_ctor_parent_empty_path(self): - parent_key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT) - with self.assertRaises(ValueError): - self._make_one(parent=parent_key) - - def test_ctor_explicit(self): - _PROJECT = "PROJECT-ALT" - _NAMESPACE = "NAMESPACE" - _KIND = "KIND" - _ID = 1234 - _PATH = [{"kind": _KIND, "id": _ID}] - key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) - self.assertEqual(key.project, _PROJECT) - self.assertEqual(key.namespace, _NAMESPACE) - self.assertEqual(key.kind, _KIND) - self.assertEqual(key.path, _PATH) - - def test_ctor_bad_kind(self): - self.assertRaises( - ValueError, self._make_one, object(), project=self._DEFAULT_PROJECT - ) + key = _make_key(_CHILD_KIND, _CHILD_ID, parent=parent_key) + assert key.project == parent_key.project + assert key.namespace == parent_key.namespace + assert key.kind == _CHILD_KIND + assert key.path == _PATH + assert key.parent is parent_key - def test_ctor_bad_id_or_name(self): - self.assertRaises( - ValueError, self._make_one, "KIND", object(), project=self._DEFAULT_PROJECT - ) - self.assertRaises( - ValueError, self._make_one, "KIND", None, project=self._DEFAULT_PROJECT - ) - self.assertRaises( - ValueError, - self._make_one, - "KIND", - 10, - "KIND2", - None, - project=self._DEFAULT_PROJECT, - ) - def test__clone(self): - _PROJECT = "PROJECT-ALT" - _NAMESPACE = "NAMESPACE" - _KIND = "KIND" - _ID = 1234 - _PATH = [{"kind": _KIND, "id": _ID}] - key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) - clone = key._clone() - self.assertEqual(clone.project, _PROJECT) - self.assertEqual(clone.namespace, _NAMESPACE) - self.assertEqual(clone.kind, _KIND) - self.assertEqual(clone.path, _PATH) - - def test__clone_with_parent(self): - _PROJECT = "PROJECT-ALT" - _NAMESPACE = "NAMESPACE" - _KIND1 = "PARENT" - _KIND2 = "KIND" - _ID1 = 1234 - _ID2 = 2345 - _PATH = [{"kind": _KIND1, "id": _ID1}, {"kind": _KIND2, "id": _ID2}] - - parent = self._make_one(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT) - key = self._make_one(_KIND2, _ID2, parent=parent) - self.assertIs(key.parent, parent) - clone = key._clone() - self.assertIs(clone.parent, key.parent) - self.assertEqual(clone.project, _PROJECT) - self.assertEqual(clone.namespace, _NAMESPACE) - self.assertEqual(clone.path, 
_PATH) - - def test___eq_____ne___w_non_key(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _NAME = "one" - key = self._make_one(_KIND, _NAME, project=_PROJECT) - self.assertFalse(key == object()) - self.assertTrue(key != object()) - - def test___eq_____ne___two_incomplete_keys_same_kind(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - key1 = self._make_one(_KIND, project=_PROJECT) - key2 = self._make_one(_KIND, project=_PROJECT) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - key1 = self._make_one(_KIND, project=_PROJECT) - key2 = self._make_one(_KIND, _ID, project=_PROJECT) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - key1 = self._make_one(_KIND, _ID, project=_PROJECT) - key2 = self._make_one(_KIND, project=_PROJECT) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___same_kind_different_ids(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID1 = 1234 - _ID2 = 2345 - key1 = self._make_one(_KIND, _ID1, project=_PROJECT) - key2 = self._make_one(_KIND, _ID2, project=_PROJECT) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___same_kind_and_id(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - key1 = self._make_one(_KIND, _ID, project=_PROJECT) - key2 = self._make_one(_KIND, _ID, project=_PROJECT) - self.assertTrue(key1 == key2) - self.assertFalse(key1 != key2) - - def test___eq_____ne___same_kind_and_id_different_project(self): - _PROJECT1 = "PROJECT1" - _PROJECT2 = "PROJECT2" - _KIND = "KIND" - _ID = 1234 - key1 = self._make_one(_KIND, _ID, project=_PROJECT1) - key2 = self._make_one(_KIND, _ID, project=_PROJECT2) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___same_kind_and_id_different_namespace(self): - _PROJECT = "PROJECT" - _NAMESPACE1 = "NAMESPACE1" - _NAMESPACE2 = "NAMESPACE2" - _KIND = "KIND" - _ID = 1234 - key1 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1) - key2 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___same_kind_different_names(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _NAME1 = "one" - _NAME2 = "two" - key1 = self._make_one(_KIND, _NAME1, project=_PROJECT) - key2 = self._make_one(_KIND, _NAME2, project=_PROJECT) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___same_kind_and_name(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _NAME = "one" - key1 = self._make_one(_KIND, _NAME, project=_PROJECT) - key2 = self._make_one(_KIND, _NAME, project=_PROJECT) - self.assertTrue(key1 == key2) - self.assertFalse(key1 != key2) - - def test___eq_____ne___same_kind_and_name_different_project(self): - _PROJECT1 = "PROJECT1" - _PROJECT2 = "PROJECT2" - _KIND = "KIND" - _NAME = "one" - key1 = self._make_one(_KIND, _NAME, project=_PROJECT1) - key2 = self._make_one(_KIND, _NAME, project=_PROJECT2) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___eq_____ne___same_kind_and_name_different_namespace(self): - _PROJECT = "PROJECT" - _NAMESPACE1 = "NAMESPACE1" - _NAMESPACE2 = "NAMESPACE2" - _KIND = "KIND" - _NAME = "one" - key1 = self._make_one(_KIND, _NAME, 
project=_PROJECT, namespace=_NAMESPACE1) - key2 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2) - self.assertFalse(key1 == key2) - self.assertTrue(key1 != key2) - - def test___hash___incomplete(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - key = self._make_one(_KIND, project=_PROJECT) - self.assertNotEqual(hash(key), hash(_KIND) + hash(_PROJECT) + hash(None)) - - def test___hash___completed_w_id(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _ID = 1234 - key = self._make_one(_KIND, _ID, project=_PROJECT) - self.assertNotEqual( - hash(key), hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None) - ) +def test_key_ctor_partial_parent(): + parent_key = _make_key("KIND", project=_DEFAULT_PROJECT) + with pytest.raises(ValueError): + _make_key("KIND2", 1234, parent=parent_key) - def test___hash___completed_w_name(self): - _PROJECT = "PROJECT" - _KIND = "KIND" - _NAME = "NAME" - key = self._make_one(_KIND, _NAME, project=_PROJECT) - self.assertNotEqual( - hash(key), hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None) - ) - def test_completed_key_on_partial_w_id(self): - key = self._make_one("KIND", project=self._DEFAULT_PROJECT) - _ID = 1234 - new_key = key.completed_key(_ID) - self.assertIsNot(key, new_key) - self.assertEqual(new_key.id, _ID) - self.assertIsNone(new_key.name) - - def test_completed_key_on_partial_w_name(self): - key = self._make_one("KIND", project=self._DEFAULT_PROJECT) - _NAME = "NAME" - new_key = key.completed_key(_NAME) - self.assertIsNot(key, new_key) - self.assertIsNone(new_key.id) - self.assertEqual(new_key.name, _NAME) - - def test_completed_key_on_partial_w_invalid(self): - key = self._make_one("KIND", project=self._DEFAULT_PROJECT) - self.assertRaises(ValueError, key.completed_key, object()) - - def test_completed_key_on_complete(self): - key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT) - self.assertRaises(ValueError, key.completed_key, 5678) - - def test_to_protobuf_defaults(self): - from google.cloud.datastore_v1.types import entity as entity_pb2 - - _KIND = "KIND" - key = self._make_one(_KIND, project=self._DEFAULT_PROJECT) - pb = key.to_protobuf() - self.assertIsInstance(pb, entity_pb2.Key) - - # Check partition ID. - self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT) - # Unset values are False-y. - self.assertEqual(pb.partition_id.namespace_id, "") - - # Check the element PB matches the partial key and kind. - (elem,) = list(pb.path) - self.assertEqual(elem.kind, _KIND) - # Unset values are False-y. - self.assertEqual(elem.name, "") - # Unset values are False-y. 
- self.assertEqual(elem.id, 0) - - def test_to_protobuf_w_explicit_project(self): - _PROJECT = "PROJECT-ALT" - key = self._make_one("KIND", project=_PROJECT) - pb = key.to_protobuf() - self.assertEqual(pb.partition_id.project_id, _PROJECT) - - def test_to_protobuf_w_explicit_namespace(self): - _NAMESPACE = "NAMESPACE" - key = self._make_one( - "KIND", namespace=_NAMESPACE, project=self._DEFAULT_PROJECT - ) - pb = key.to_protobuf() - self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE) - - def test_to_protobuf_w_explicit_path(self): - _PARENT = "PARENT" - _CHILD = "CHILD" - _ID = 1234 - _NAME = "NAME" - key = self._make_one(_PARENT, _NAME, _CHILD, _ID, project=self._DEFAULT_PROJECT) - pb = key.to_protobuf() - elems = list(pb.path) - self.assertEqual(len(elems), 2) - self.assertEqual(elems[0].kind, _PARENT) - self.assertEqual(elems[0].name, _NAME) - self.assertEqual(elems[1].kind, _CHILD) - self.assertEqual(elems[1].id, _ID) - - def test_to_protobuf_w_no_kind(self): - key = self._make_one("KIND", project=self._DEFAULT_PROJECT) - # Force the 'kind' to be unset. Maybe `to_protobuf` should fail - # on this? The backend certainly will. - key._path[-1].pop("kind") - pb = key.to_protobuf() - # Unset values are False-y. - self.assertEqual(pb.path[0].kind, "") - - def test_to_legacy_urlsafe(self): - key = self._make_one( - *self._URLSAFE_FLAT_PATH1, - project=self._URLSAFE_APP1, - namespace=self._URLSAFE_NAMESPACE1 - ) - # NOTE: ``key.project`` is somewhat "invalid" but that is OK. - urlsafe = key.to_legacy_urlsafe() - self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE1) - - def test_to_legacy_urlsafe_strip_padding(self): - key = self._make_one(*self._URLSAFE_FLAT_PATH2, project=self._URLSAFE_APP2) - # NOTE: ``key.project`` is somewhat "invalid" but that is OK. - urlsafe = key.to_legacy_urlsafe() - self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE2) - # Make sure it started with base64 padding. - self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) - - def test_to_legacy_urlsafe_with_location_prefix(self): - key = self._make_one(*self._URLSAFE_FLAT_PATH3, project=self._URLSAFE_APP3) - urlsafe = key.to_legacy_urlsafe(location_prefix="s~") - self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE3) - - def test_from_legacy_urlsafe(self): - klass = self._get_target_class() - key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE1) - - self.assertEqual("s~" + key.project, self._URLSAFE_APP1) - self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE1) - self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH1) - # Also make sure we didn't accidentally set the parent. - self.assertIsNone(key._parent) - self.assertIsNotNone(key.parent) - self.assertIs(key._parent, key.parent) - - def test_from_legacy_urlsafe_needs_padding(self): - klass = self._get_target_class() - # Make sure it will have base64 padding added. - self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) - key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE2) - - self.assertEqual("s~" + key.project, self._URLSAFE_APP2) - self.assertIsNone(key.namespace) - self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH2) - - def test_from_legacy_urlsafe_with_location_prefix(self): - klass = self._get_target_class() - # Make sure it will have base64 padding added. 
-        key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE3)
-
-        self.assertEqual(key.project, self._URLSAFE_APP3)
-        self.assertIsNone(key.namespace)
-        self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH3)
-
-    def test_is_partial_no_name_or_id(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertTrue(key.is_partial)
-
-    def test_is_partial_w_id(self):
-        _ID = 1234
-        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
-        self.assertFalse(key.is_partial)
-
-    def test_is_partial_w_name(self):
-        _NAME = "NAME"
-        key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT)
-        self.assertFalse(key.is_partial)
-
-    def test_id_or_name_no_name_or_id(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.id_or_name)
-
-    def test_id_or_name_no_name_or_id_child(self):
-        key = self._make_one("KIND1", 1234, "KIND2", project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.id_or_name)
-
-    def test_id_or_name_w_id_only(self):
-        _ID = 1234
-        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
-        self.assertEqual(key.id_or_name, _ID)
-
-    def test_id_or_name_w_name_only(self):
-        _NAME = "NAME"
-        key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT)
-        self.assertEqual(key.id_or_name, _NAME)
-
-    def test_id_or_name_w_id_zero(self):
-        _ID = 0
-        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
-        self.assertEqual(key.id_or_name, _ID)
-
-    def test_parent_default(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.parent)
-
-    def test_parent_explicit_top_level(self):
-        key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.parent)
-
-    def test_parent_explicit_nested(self):
-        _PARENT_KIND = "KIND1"
-        _PARENT_ID = 1234
-        _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
-        key = self._make_one(
-            _PARENT_KIND, _PARENT_ID, "KIND2", project=self._DEFAULT_PROJECT
-        )
-        self.assertEqual(key.parent.path, _PARENT_PATH)
-
-    def test_parent_multiple_calls(self):
-        _PARENT_KIND = "KIND1"
-        _PARENT_ID = 1234
-        _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
-        key = self._make_one(
-            _PARENT_KIND, _PARENT_ID, "KIND2", project=self._DEFAULT_PROJECT
+def test_key_ctor_parent_bad_type():
+    with pytest.raises(AttributeError):
+        _make_key("KIND2", 1234, parent=("KIND1", 1234), project=_DEFAULT_PROJECT)
+
+
+def test_key_ctor_parent_bad_namespace():
+    parent_key = _make_key("KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        _make_key(
+            "KIND2", 1234, namespace="BAR", parent=parent_key, project=_DEFAULT_PROJECT,
         )
-        parent = key.parent
-        self.assertEqual(parent.path, _PARENT_PATH)
-        new_parent = key.parent
-        self.assertIs(parent, new_parent)


-class Test__clean_app(unittest.TestCase):
+def test_key_ctor_parent_bad_project():
+    parent_key = _make_key("KIND", 1234, project="FOO")
+    with pytest.raises(ValueError):
+        _make_key("KIND2", 1234, parent=parent_key, project="BAR")
+
+
+def test_key_ctor_parent_empty_path():
+    parent_key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        _make_key(parent=parent_key)
+
+
+def test_key_ctor_explicit():
+    _PROJECT = "PROJECT-ALT"
+    _NAMESPACE = "NAMESPACE"
+    _KIND = "KIND"
+    _ID = 1234
+    _PATH = [{"kind": _KIND, "id": _ID}]
+    key = _make_key(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
+    assert key.project == _PROJECT
+    assert key.namespace == _NAMESPACE
+    assert key.kind == _KIND
+    assert key.path ==
_PATH + + +def test_key_ctor_bad_kind(): + with pytest.raises(ValueError): + _make_key(object(), project=_DEFAULT_PROJECT) + + +def test_key_ctor_bad_id_or_name(): + with pytest.raises(ValueError): + _make_key("KIND", object(), project=_DEFAULT_PROJECT) + + with pytest.raises(ValueError): + _make_key("KIND", None, project=_DEFAULT_PROJECT) + + with pytest.raises(ValueError): + _make_key("KIND", 10, "KIND2", None, project=_DEFAULT_PROJECT) + + +def test_key__clone(): + _PROJECT = "PROJECT-ALT" + _NAMESPACE = "NAMESPACE" + _KIND = "KIND" + _ID = 1234 + _PATH = [{"kind": _KIND, "id": _ID}] + key = _make_key(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) + + clone = key._clone() + + assert clone.project == _PROJECT + assert clone.namespace == _NAMESPACE + assert clone.kind == _KIND + assert clone.path == _PATH + + +def test_key__clone_with_parent(): + _PROJECT = "PROJECT-ALT" + _NAMESPACE = "NAMESPACE" + _KIND1 = "PARENT" + _KIND2 = "KIND" + _ID1 = 1234 + _ID2 = 2345 + _PATH = [{"kind": _KIND1, "id": _ID1}, {"kind": _KIND2, "id": _ID2}] + + parent = _make_key(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT) + key = _make_key(_KIND2, _ID2, parent=parent) + assert key.parent is parent + + clone = key._clone() + + assert clone.parent is key.parent + assert clone.project == _PROJECT + assert clone.namespace == _NAMESPACE + assert clone.path == _PATH + + +def test_key___eq_____ne___w_non_key(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _NAME = "one" + key = _make_key(_KIND, _NAME, project=_PROJECT) + assert not key == object() + assert key != object() + + +def test_key___eq_____ne___two_incomplete_keys_same_kind(): + _PROJECT = "PROJECT" + _KIND = "KIND" + key1 = _make_key(_KIND, project=_PROJECT) + key2 = _make_key(_KIND, project=_PROJECT) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___incomplete_key_w_complete_key_same_kind(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID = 1234 + key1 = _make_key(_KIND, project=_PROJECT) + key2 = _make_key(_KIND, _ID, project=_PROJECT) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___complete_key_w_incomplete_key_same_kind(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID = 1234 + key1 = _make_key(_KIND, _ID, project=_PROJECT) + key2 = _make_key(_KIND, project=_PROJECT) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___same_kind_different_ids(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID1 = 1234 + _ID2 = 2345 + key1 = _make_key(_KIND, _ID1, project=_PROJECT) + key2 = _make_key(_KIND, _ID2, project=_PROJECT) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___same_kind_and_id(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID = 1234 + key1 = _make_key(_KIND, _ID, project=_PROJECT) + key2 = _make_key(_KIND, _ID, project=_PROJECT) + assert key1 == key2 + assert not key1 != key2 + + +def test_key___eq_____ne___same_kind_and_id_different_project(): + _PROJECT1 = "PROJECT1" + _PROJECT2 = "PROJECT2" + _KIND = "KIND" + _ID = 1234 + key1 = _make_key(_KIND, _ID, project=_PROJECT1) + key2 = _make_key(_KIND, _ID, project=_PROJECT2) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___same_kind_and_id_different_namespace(): + _PROJECT = "PROJECT" + _NAMESPACE1 = "NAMESPACE1" + _NAMESPACE2 = "NAMESPACE2" + _KIND = "KIND" + _ID = 1234 + key1 = _make_key(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1) + key2 = _make_key(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2) + assert not key1 == key2 + assert key1 != 
key2 + + +def test_key___eq_____ne___same_kind_different_names(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _NAME1 = "one" + _NAME2 = "two" + key1 = _make_key(_KIND, _NAME1, project=_PROJECT) + key2 = _make_key(_KIND, _NAME2, project=_PROJECT) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___same_kind_and_name(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _NAME = "one" + key1 = _make_key(_KIND, _NAME, project=_PROJECT) + key2 = _make_key(_KIND, _NAME, project=_PROJECT) + assert key1 == key2 + assert not key1 != key2 + + +def test_key___eq_____ne___same_kind_and_name_different_project(): + _PROJECT1 = "PROJECT1" + _PROJECT2 = "PROJECT2" + _KIND = "KIND" + _NAME = "one" + key1 = _make_key(_KIND, _NAME, project=_PROJECT1) + key2 = _make_key(_KIND, _NAME, project=_PROJECT2) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___eq_____ne___same_kind_and_name_different_namespace(): + _PROJECT = "PROJECT" + _NAMESPACE1 = "NAMESPACE1" + _NAMESPACE2 = "NAMESPACE2" + _KIND = "KIND" + _NAME = "one" + key1 = _make_key(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1) + key2 = _make_key(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2) + assert not key1 == key2 + assert key1 != key2 + + +def test_key___hash___incomplete(): + _PROJECT = "PROJECT" + _KIND = "KIND" + key = _make_key(_KIND, project=_PROJECT) + assert hash(key) != hash(_KIND) + hash(_PROJECT) + hash(None) + + +def test_key___hash___completed_w_id(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _ID = 1234 + key = _make_key(_KIND, _ID, project=_PROJECT) + assert hash(key) != hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None) + + +def test_key___hash___completed_w_name(): + _PROJECT = "PROJECT" + _KIND = "KIND" + _NAME = "NAME" + key = _make_key(_KIND, _NAME, project=_PROJECT) + assert hash(key) != hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None) + + +def test_key_completed_key_on_partial_w_id(): + key = _make_key("KIND", project=_DEFAULT_PROJECT) + _ID = 1234 + new_key = key.completed_key(_ID) + assert key is not new_key + assert new_key.id == _ID + assert new_key.name is None + + +def test_key_completed_key_on_partial_w_name(): + key = _make_key("KIND", project=_DEFAULT_PROJECT) + _NAME = "NAME" + new_key = key.completed_key(_NAME) + assert key is not new_key + assert new_key.id is None + assert new_key.name == _NAME + + +def test_key_completed_key_on_partial_w_invalid(): + key = _make_key("KIND", project=_DEFAULT_PROJECT) + with pytest.raises(ValueError): + key.completed_key(object()) + + +def test_key_completed_key_on_complete(): + key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT) + with pytest.raises(ValueError): + key.completed_key(5678) + + +def test_key_to_protobuf_defaults(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + + _KIND = "KIND" + key = _make_key(_KIND, project=_DEFAULT_PROJECT) + pb = key.to_protobuf() + assert isinstance(pb, entity_pb2.Key) + + # Check partition ID. + assert pb.partition_id.project_id == _DEFAULT_PROJECT + # Unset values are False-y. + assert pb.partition_id.namespace_id == "" + + # Check the element PB matches the partial key and kind. + (elem,) = list(pb.path) + assert elem.kind == _KIND + # Unset values are False-y. + assert elem.name == "" + # Unset values are False-y. 
+    assert elem.id == 0
+
+
+def test_key_to_protobuf_w_explicit_project():
+    _PROJECT = "PROJECT-ALT"
+    key = _make_key("KIND", project=_PROJECT)
+    pb = key.to_protobuf()
+    assert pb.partition_id.project_id == _PROJECT
+
+
+def test_key_to_protobuf_w_explicit_namespace():
+    _NAMESPACE = "NAMESPACE"
+    key = _make_key("KIND", namespace=_NAMESPACE, project=_DEFAULT_PROJECT)
+    pb = key.to_protobuf()
+    assert pb.partition_id.namespace_id == _NAMESPACE
+
+
+def test_key_to_protobuf_w_explicit_path():
+    _PARENT = "PARENT"
+    _CHILD = "CHILD"
+    _ID = 1234
+    _NAME = "NAME"
+    key = _make_key(_PARENT, _NAME, _CHILD, _ID, project=_DEFAULT_PROJECT)
+    pb = key.to_protobuf()
+    elems = list(pb.path)
+    assert len(elems) == 2
+    assert elems[0].kind == _PARENT
+    assert elems[0].name == _NAME
+    assert elems[1].kind == _CHILD
+    assert elems[1].id == _ID
+
+
+def test_key_to_protobuf_w_no_kind():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    # Force the 'kind' to be unset. Maybe `to_protobuf` should fail
+    # on this? The backend certainly will.
+    key._path[-1].pop("kind")
+    pb = key.to_protobuf()
+    # Unset values are False-y.
+    assert pb.path[0].kind == ""
+
+
+def test_key_to_legacy_urlsafe():
+    key = _make_key(
+        *_URLSAFE_FLAT_PATH1, project=_URLSAFE_APP1, namespace=_URLSAFE_NAMESPACE1
+    )
+    # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
+    urlsafe = key.to_legacy_urlsafe()
+    assert urlsafe == _URLSAFE_EXAMPLE1
+
+
+def test_key_to_legacy_urlsafe_strip_padding():
+    key = _make_key(*_URLSAFE_FLAT_PATH2, project=_URLSAFE_APP2)
+    # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
+    urlsafe = key.to_legacy_urlsafe()
+    assert urlsafe == _URLSAFE_EXAMPLE2
+    # Make sure it started with base64 padding.
+    assert len(_URLSAFE_EXAMPLE2) % 4 != 0
+
+
+def test_key_to_legacy_urlsafe_with_location_prefix():
+    key = _make_key(*_URLSAFE_FLAT_PATH3, project=_URLSAFE_APP3)
+    urlsafe = key.to_legacy_urlsafe(location_prefix="s~")
+    assert urlsafe == _URLSAFE_EXAMPLE3
+
+
+def test_key_from_legacy_urlsafe():
+    from google.cloud.datastore.key import Key
+
+    key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE1)
+
+    assert "s~" + key.project == _URLSAFE_APP1
+    assert key.namespace == _URLSAFE_NAMESPACE1
+    assert key.flat_path == _URLSAFE_FLAT_PATH1
+    # Also make sure we didn't accidentally set the parent.
+    assert key._parent is None
+    assert key.parent is not None
+    assert key._parent is key.parent
+
+
+def test_key_from_legacy_urlsafe_needs_padding():
+    from google.cloud.datastore.key import Key
+
+    # Make sure it will have base64 padding added.
+    assert len(_URLSAFE_EXAMPLE2) % 4 != 0
+    key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE2)
+
+    assert "s~" + key.project == _URLSAFE_APP2
+    assert key.namespace is None
+    assert key.flat_path == _URLSAFE_FLAT_PATH2
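[Editor's note: the legacy-urlsafe tests above pin down a round trip through the old App Engine key encoding. A minimal sketch of that round trip, assuming a made-up project and key path rather than the _URLSAFE_* fixtures:

    from google.cloud.datastore.key import Key

    key = Key("Parent", "foo", "Child", 1234, project="my-project")
    # Per the tests above, the urlsafe form drops base64 "=" padding and
    # from_legacy_urlsafe() strips a location prefix such as "s~".
    urlsafe = key.to_legacy_urlsafe(location_prefix="s~")
    round_trip = Key.from_legacy_urlsafe(urlsafe)
    assert round_trip.project == "my-project"
    assert round_trip.flat_path == ("Parent", "foo", "Child", 1234)

The exact bytes produced depend on the legacy encoding, so only the round trip is asserted here.]

+
+
+def test_key_from_legacy_urlsafe_with_location_prefix():
+    from google.cloud.datastore.key import Key
+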
+    key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE3)
+
+    assert key.project == _URLSAFE_APP3
+    assert key.namespace is None
+    assert key.flat_path == _URLSAFE_FLAT_PATH3
+
+
+def test_key_is_partial_no_name_or_id():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    assert key.is_partial
+
+
+def test_key_is_partial_w_id():
+    _ID = 1234
+    key = _make_key("KIND", _ID, project=_DEFAULT_PROJECT)
+    assert not key.is_partial
+
+
+def test_key_is_partial_w_name():
+    _NAME = "NAME"
+    key = _make_key("KIND", _NAME, project=_DEFAULT_PROJECT)
+    assert not key.is_partial
+
+
+def test_key_id_or_name_no_name_or_id():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    assert key.id_or_name is None
+
+
+def test_key_id_or_name_no_name_or_id_child():
+    key = _make_key("KIND1", 1234, "KIND2", project=_DEFAULT_PROJECT)
+    assert key.id_or_name is None
+
+
+def test_key_id_or_name_w_id_only():
+    _ID = 1234
+    key = _make_key("KIND", _ID, project=_DEFAULT_PROJECT)
+    assert key.id_or_name == _ID
+
+
+def test_key_id_or_name_w_name_only():
+    _NAME = "NAME"
+    key = _make_key("KIND", _NAME, project=_DEFAULT_PROJECT)
+    assert key.id_or_name == _NAME
+
+
+def test_key_id_or_name_w_id_zero():
+    _ID = 0
+    key = _make_key("KIND", _ID, project=_DEFAULT_PROJECT)
+    assert key.id_or_name == _ID
+
+
+def test_key_parent_default():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    assert key.parent is None
+
+
+def test_key_parent_explicit_top_level():
+    key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT)
+    assert key.parent is None
+
+
+def test_key_parent_explicit_nested():
+    _PARENT_KIND = "KIND1"
+    _PARENT_ID = 1234
+    _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
+    key = _make_key(_PARENT_KIND, _PARENT_ID, "KIND2", project=_DEFAULT_PROJECT)
+    assert key.parent.path == _PARENT_PATH
+
+
+def test_key_parent_multiple_calls():
+    _PARENT_KIND = "KIND1"
+    _PARENT_ID = 1234
+    _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
+    key = _make_key(_PARENT_KIND, _PARENT_ID, "KIND2", project=_DEFAULT_PROJECT)
+    parent = key.parent
+    assert parent.path == _PARENT_PATH
+    new_parent = key.parent
+    assert parent is new_parent
+
+
+def test__clean_app_w_already_clean():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__clean_app_w_standard():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = "s~" + PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__clean_app_w_european():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = "e~" + PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__clean_app_w_dev_server():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = "dev~" + PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__get_empty_w_unset():
+    from google.cloud.datastore.key import _get_empty
+
+    for empty_value in (u"", 0, 0.0, []):
+        ret_val = _get_empty(empty_value, empty_value)
+        assert ret_val is None
+
+
+def test__get_empty_w_actually_set():
+    from google.cloud.datastore.key import _get_empty
+
+    value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], []))
+    for value, empty_value in value_pairs:
+        ret_val = _get_empty(value, empty_value)
+        assert ret_val is value
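[Editor's note: the _clean_app and _get_empty tests above fix the behavior of two small helpers. A sketch of what they pin down; these are hypothetical reimplementations for illustration, not the library's code:

    def clean_app_sketch(app_str):
        # Strip a legacy App Engine partition prefix, if present.
        for prefix in ("s~", "e~", "dev~"):
            if app_str.startswith(prefix):
                return app_str[len(prefix):]
        return app_str

    def get_empty_sketch(value, empty_value):
        # Proto3 scalar defaults ("", 0, 0.0, []) mean "unset" -> None;
        # anything else passes through unchanged.
        return None if value == empty_value else value

    assert clean_app_sketch("s~my-app") == "my-app"
    assert get_empty_sketch("", "") is None
    assert get_empty_sketch(10, 0) == 10
]

+
+
+def test__check_database_id_w_empty_value():
+    from google.cloud.datastore.key import _check_database_id
+
+    ret_val = _check_database_id(u"")
+    # Really we are just happy there was no exception.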
+ assert ret_val is None + + +def test__check_database_id_w_failure(): + from google.cloud.datastore.key import _check_database_id + + with pytest.raises(ValueError): + _check_database_id(u"some-database-id") + + +def test__add_id_or_name_add_id(): + from google.cloud.datastore.key import _add_id_or_name + + flat_path = [] + id_ = 123 + element_pb = _make_element_pb(id=id_) + + ret_val = _add_id_or_name(flat_path, element_pb, False) + assert ret_val is None + assert flat_path == [id_] + ret_val = _add_id_or_name(flat_path, element_pb, True) + assert ret_val is None + assert flat_path == [id_, id_] + + +def test__add_id_or_name_add_name(): + from google.cloud.datastore.key import _add_id_or_name + + flat_path = [] + name = "moon-shadow" + element_pb = _make_element_pb(name=name) + + ret_val = _add_id_or_name(flat_path, element_pb, False) + assert ret_val is None + assert flat_path == [name] + ret_val = _add_id_or_name(flat_path, element_pb, True) + assert ret_val is None + assert flat_path == [name, name] + + +def test__add_id_or_name_both_present(): + from google.cloud.datastore.key import _add_id_or_name + + element_pb = _make_element_pb(id=17, name="seventeen") + flat_path = [] + with pytest.raises(ValueError): + _add_id_or_name(flat_path, element_pb, False) + with pytest.raises(ValueError): + _add_id_or_name(flat_path, element_pb, True) + + assert flat_path == [] + + +def test__add_id_or_name_both_empty_failure(): + from google.cloud.datastore.key import _add_id_or_name + + element_pb = _make_element_pb() + flat_path = [] + with pytest.raises(ValueError): + _add_id_or_name(flat_path, element_pb, False) + + assert flat_path == [] + + +def test__add_id_or_name_both_empty_allowed(): + from google.cloud.datastore.key import _add_id_or_name + + element_pb = _make_element_pb() + flat_path = [] + ret_val = _add_id_or_name(flat_path, element_pb, True) + assert ret_val is None + assert flat_path == [] + - PROJECT = "my-prahjekt" +def test__get_flat_path_one_pair(): + from google.cloud.datastore.key import _get_flat_path - @staticmethod - def _call_fut(app_str): - from google.cloud.datastore.key import _clean_app + kind = "Widget" + name = "Scooter" + element_pb = _make_element_pb(type=kind, name=name) + path_pb = _make_path_pb(element_pb) + flat_path = _get_flat_path(path_pb) + assert flat_path == (kind, name) - return _clean_app(app_str) - def test_already_clean(self): - app_str = self.PROJECT - self.assertEqual(self._call_fut(app_str), self.PROJECT) +def test__get_flat_path_two_pairs(): + from google.cloud.datastore.key import _get_flat_path - def test_standard(self): - app_str = "s~" + self.PROJECT - self.assertEqual(self._call_fut(app_str), self.PROJECT) + kind1 = "parent" + id1 = 59 + element_pb1 = _make_element_pb(type=kind1, id=id1) - def test_european(self): - app_str = "e~" + self.PROJECT - self.assertEqual(self._call_fut(app_str), self.PROJECT) + kind2 = "child" + name2 = "naem" + element_pb2 = _make_element_pb(type=kind2, name=name2) - def test_dev_server(self): - app_str = "dev~" + self.PROJECT - self.assertEqual(self._call_fut(app_str), self.PROJECT) + path_pb = _make_path_pb(element_pb1, element_pb2) + flat_path = _get_flat_path(path_pb) + assert flat_path == (kind1, id1, kind2, name2) -class Test__get_empty(unittest.TestCase): - @staticmethod - def _call_fut(value, empty_value): - from google.cloud.datastore.key import _get_empty +def test__get_flat_path_partial_key(): + from google.cloud.datastore.key import _get_flat_path - return _get_empty(value, empty_value) + kind1 = 
"grandparent" + name1 = "cats" + element_pb1 = _make_element_pb(type=kind1, name=name1) - def test_unset(self): - for empty_value in (u"", 0, 0.0, []): - ret_val = self._call_fut(empty_value, empty_value) - self.assertIsNone(ret_val) + kind2 = "parent" + id2 = 1337 + element_pb2 = _make_element_pb(type=kind2, id=id2) - def test_actually_set(self): - value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) - for value, empty_value in value_pairs: - ret_val = self._call_fut(value, empty_value) - self.assertIs(ret_val, value) + kind3 = "child" + element_pb3 = _make_element_pb(type=kind3) + path_pb = _make_path_pb(element_pb1, element_pb2, element_pb3) + flat_path = _get_flat_path(path_pb) + assert flat_path == (kind1, name1, kind2, id2, kind3) -class Test__check_database_id(unittest.TestCase): - @staticmethod - def _call_fut(database_id): - from google.cloud.datastore.key import _check_database_id - return _check_database_id(database_id) +def test__to_legacy_path_w_one_pair(): + from google.cloud.datastore.key import _to_legacy_path - def test_empty_value(self): - ret_val = self._call_fut(u"") - # Really we are just happy there was no exception. - self.assertIsNone(ret_val) + kind = "Widget" + name = "Scooter" + dict_path = [{"kind": kind, "name": name}] + path_pb = _to_legacy_path(dict_path) - def test_failure(self): - with self.assertRaises(ValueError): - self._call_fut(u"some-database-id") + element_pb = _make_element_pb(type=kind, name=name) + expected_pb = _make_path_pb(element_pb) + assert path_pb == expected_pb -class Test__add_id_or_name(unittest.TestCase): - @staticmethod - def _call_fut(flat_path, element_pb, empty_allowed): - from google.cloud.datastore.key import _add_id_or_name +def test__to_legacy_path_w_two_pairs(): + from google.cloud.datastore.key import _to_legacy_path - return _add_id_or_name(flat_path, element_pb, empty_allowed) + kind1 = "parent" + id1 = 59 - def test_add_id(self): - flat_path = [] - id_ = 123 - element_pb = _make_element_pb(id=id_) + kind2 = "child" + name2 = "naem" - ret_val = self._call_fut(flat_path, element_pb, False) - self.assertIsNone(ret_val) - self.assertEqual(flat_path, [id_]) - ret_val = self._call_fut(flat_path, element_pb, True) - self.assertIsNone(ret_val) - self.assertEqual(flat_path, [id_, id_]) + dict_path = [{"kind": kind1, "id": id1}, {"kind": kind2, "name": name2}] + path_pb = _to_legacy_path(dict_path) - def test_add_name(self): - flat_path = [] - name = "moon-shadow" - element_pb = _make_element_pb(name=name) + element_pb1 = _make_element_pb(type=kind1, id=id1) + element_pb2 = _make_element_pb(type=kind2, name=name2) + expected_pb = _make_path_pb(element_pb1, element_pb2) + assert path_pb == expected_pb - ret_val = self._call_fut(flat_path, element_pb, False) - self.assertIsNone(ret_val) - self.assertEqual(flat_path, [name]) - ret_val = self._call_fut(flat_path, element_pb, True) - self.assertIsNone(ret_val) - self.assertEqual(flat_path, [name, name]) - def test_both_present(self): - element_pb = _make_element_pb(id=17, name="seventeen") - flat_path = [] - with self.assertRaises(ValueError): - self._call_fut(flat_path, element_pb, False) - with self.assertRaises(ValueError): - self._call_fut(flat_path, element_pb, True) +def test__to_legacy_path_w_partial_key(): + from google.cloud.datastore.key import _to_legacy_path - self.assertEqual(flat_path, []) + kind1 = "grandparent" + name1 = "cats" - def test_both_empty_failure(self): - element_pb = _make_element_pb() - flat_path = [] - with 
self.assertRaises(ValueError): - self._call_fut(flat_path, element_pb, False) + kind2 = "parent" + id2 = 1337 - self.assertEqual(flat_path, []) - - def test_both_empty_allowed(self): - element_pb = _make_element_pb() - flat_path = [] - ret_val = self._call_fut(flat_path, element_pb, True) - self.assertIsNone(ret_val) - self.assertEqual(flat_path, []) - - -class Test__get_flat_path(unittest.TestCase): - @staticmethod - def _call_fut(path_pb): - from google.cloud.datastore.key import _get_flat_path - - return _get_flat_path(path_pb) - - def test_one_pair(self): - kind = "Widget" - name = "Scooter" - element_pb = _make_element_pb(type=kind, name=name) - path_pb = _make_path_pb(element_pb) - flat_path = self._call_fut(path_pb) - self.assertEqual(flat_path, (kind, name)) - - def test_two_pairs(self): - kind1 = "parent" - id1 = 59 - element_pb1 = _make_element_pb(type=kind1, id=id1) - - kind2 = "child" - name2 = "naem" - element_pb2 = _make_element_pb(type=kind2, name=name2) - - path_pb = _make_path_pb(element_pb1, element_pb2) - flat_path = self._call_fut(path_pb) - self.assertEqual(flat_path, (kind1, id1, kind2, name2)) - - def test_partial_key(self): - kind1 = "grandparent" - name1 = "cats" - element_pb1 = _make_element_pb(type=kind1, name=name1) - - kind2 = "parent" - id2 = 1337 - element_pb2 = _make_element_pb(type=kind2, id=id2) - - kind3 = "child" - element_pb3 = _make_element_pb(type=kind3) - - path_pb = _make_path_pb(element_pb1, element_pb2, element_pb3) - flat_path = self._call_fut(path_pb) - self.assertEqual(flat_path, (kind1, name1, kind2, id2, kind3)) - - -class Test__to_legacy_path(unittest.TestCase): - @staticmethod - def _call_fut(dict_path): - from google.cloud.datastore.key import _to_legacy_path - - return _to_legacy_path(dict_path) - - def test_one_pair(self): - kind = "Widget" - name = "Scooter" - dict_path = [{"kind": kind, "name": name}] - path_pb = self._call_fut(dict_path) - - element_pb = _make_element_pb(type=kind, name=name) - expected_pb = _make_path_pb(element_pb) - self.assertEqual(path_pb, expected_pb) - - def test_two_pairs(self): - kind1 = "parent" - id1 = 59 - - kind2 = "child" - name2 = "naem" - - dict_path = [{"kind": kind1, "id": id1}, {"kind": kind2, "name": name2}] - path_pb = self._call_fut(dict_path) - - element_pb1 = _make_element_pb(type=kind1, id=id1) - element_pb2 = _make_element_pb(type=kind2, name=name2) - expected_pb = _make_path_pb(element_pb1, element_pb2) - self.assertEqual(path_pb, expected_pb) - - def test_partial_key(self): - kind1 = "grandparent" - name1 = "cats" + kind3 = "child" - kind2 = "parent" - id2 = 1337 + dict_path = [ + {"kind": kind1, "name": name1}, + {"kind": kind2, "id": id2}, + {"kind": kind3}, + ] + path_pb = _to_legacy_path(dict_path) - kind3 = "child" - - dict_path = [ - {"kind": kind1, "name": name1}, - {"kind": kind2, "id": id2}, - {"kind": kind3}, - ] - path_pb = self._call_fut(dict_path) - - element_pb1 = _make_element_pb(type=kind1, name=name1) - element_pb2 = _make_element_pb(type=kind2, id=id2) - element_pb3 = _make_element_pb(type=kind3) - expected_pb = _make_path_pb(element_pb1, element_pb2, element_pb3) - self.assertEqual(path_pb, expected_pb) + element_pb1 = _make_element_pb(type=kind1, name=name1) + element_pb2 = _make_element_pb(type=kind2, id=id2) + element_pb3 = _make_element_pb(type=kind3) + expected_pb = _make_path_pb(element_pb1, element_pb2, element_pb3) + assert path_pb == expected_pb def _make_element_pb(**kwargs): diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py 
b/packages/google-cloud-datastore/tests/unit/test_query.py index dcb4e9f53ab3..3cbd95b84155 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -12,770 +12,791 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest - import mock import pytest +_PROJECT = "PROJECT" + + +def test_query_ctor_defaults(): + client = _make_client() + query = _make_query(client) + assert query._client is client + assert query.project == client.project + assert query.kind is None + assert query.namespace == client.namespace + assert query.ancestor is None + assert query.filters == [] + assert query.projection == [] + assert query.order == [] + assert query.distinct_on == [] + + +def test_query_ctor_explicit(): + from google.cloud.datastore.key import Key + + _PROJECT = "OTHER_PROJECT" + _KIND = "KIND" + _NAMESPACE = "OTHER_NAMESPACE" + client = _make_client() + ancestor = Key("ANCESTOR", 123, project=_PROJECT) + FILTERS = [("foo", "=", "Qux"), ("bar", "<", 17)] + PROJECTION = ["foo", "bar", "baz"] + ORDER = ["foo", "bar"] + DISTINCT_ON = ["foo"] + query = _make_query( + client, + kind=_KIND, + project=_PROJECT, + namespace=_NAMESPACE, + ancestor=ancestor, + filters=FILTERS, + projection=PROJECTION, + order=ORDER, + distinct_on=DISTINCT_ON, + ) + assert query._client is client + assert query.project == _PROJECT + assert query.kind == _KIND + assert query.namespace == _NAMESPACE + assert query.ancestor.path == ancestor.path + assert query.filters == FILTERS + assert query.projection == PROJECTION + assert query.order == ORDER + assert query.distinct_on == DISTINCT_ON -class TestQuery(unittest.TestCase): - - _PROJECT = "PROJECT" - - @staticmethod - def _get_target_class(): - from google.cloud.datastore.query import Query - - return Query - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _make_client(self): - return _Client(self._PROJECT) - - def test_ctor_defaults(self): - client = self._make_client() - query = self._make_one(client) - self.assertIs(query._client, client) - self.assertEqual(query.project, client.project) - self.assertIsNone(query.kind) - self.assertEqual(query.namespace, client.namespace) - self.assertIsNone(query.ancestor) - self.assertEqual(query.filters, []) - self.assertEqual(query.projection, []) - self.assertEqual(query.order, []) - self.assertEqual(query.distinct_on, []) - - def test_ctor_explicit(self): - from google.cloud.datastore.key import Key - - _PROJECT = "OTHER_PROJECT" - _KIND = "KIND" - _NAMESPACE = "OTHER_NAMESPACE" - client = self._make_client() - ancestor = Key("ANCESTOR", 123, project=_PROJECT) - FILTERS = [("foo", "=", "Qux"), ("bar", "<", 17)] - PROJECTION = ["foo", "bar", "baz"] - ORDER = ["foo", "bar"] - DISTINCT_ON = ["foo"] - query = self._make_one( - client, - kind=_KIND, - project=_PROJECT, - namespace=_NAMESPACE, - ancestor=ancestor, - filters=FILTERS, - projection=PROJECTION, - order=ORDER, - distinct_on=DISTINCT_ON, - ) - self.assertIs(query._client, client) - self.assertEqual(query.project, _PROJECT) - self.assertEqual(query.kind, _KIND) - self.assertEqual(query.namespace, _NAMESPACE) - self.assertEqual(query.ancestor.path, ancestor.path) - self.assertEqual(query.filters, FILTERS) - self.assertEqual(query.projection, PROJECTION) - self.assertEqual(query.order, ORDER) - self.assertEqual(query.distinct_on, DISTINCT_ON) - - def test_ctor_bad_projection(self): - BAD_PROJECTION = 
object() - self.assertRaises( - TypeError, self._make_one, self._make_client(), projection=BAD_PROJECTION - ) - def test_ctor_bad_order(self): - BAD_ORDER = object() - self.assertRaises( - TypeError, self._make_one, self._make_client(), order=BAD_ORDER - ) +def test_query_ctor_bad_projection(): + BAD_PROJECTION = object() + with pytest.raises(TypeError): + _make_query(_make_client(), projection=BAD_PROJECTION) - def test_ctor_bad_distinct_on(self): - BAD_DISTINCT_ON = object() - self.assertRaises( - TypeError, self._make_one, self._make_client(), distinct_on=BAD_DISTINCT_ON - ) - def test_ctor_bad_filters(self): - FILTERS_CANT_UNPACK = [("one", "two")] - self.assertRaises( - ValueError, self._make_one, self._make_client(), filters=FILTERS_CANT_UNPACK - ) +def test_query_ctor_bad_order(): + BAD_ORDER = object() + with pytest.raises(TypeError): + _make_query(_make_client(), order=BAD_ORDER) - def test_namespace_setter_w_non_string(self): - query = self._make_one(self._make_client()) - - def _assign(val): - query.namespace = val - - self.assertRaises(ValueError, _assign, object()) - - def test_namespace_setter(self): - _NAMESPACE = "OTHER_NAMESPACE" - query = self._make_one(self._make_client()) - query.namespace = _NAMESPACE - self.assertEqual(query.namespace, _NAMESPACE) - - def test_kind_setter_w_non_string(self): - query = self._make_one(self._make_client()) - - def _assign(val): - query.kind = val - - self.assertRaises(TypeError, _assign, object()) - - def test_kind_setter_wo_existing(self): - _KIND = "KIND" - query = self._make_one(self._make_client()) - query.kind = _KIND - self.assertEqual(query.kind, _KIND) - - def test_kind_setter_w_existing(self): - _KIND_BEFORE = "KIND_BEFORE" - _KIND_AFTER = "KIND_AFTER" - query = self._make_one(self._make_client(), kind=_KIND_BEFORE) - self.assertEqual(query.kind, _KIND_BEFORE) - query.kind = _KIND_AFTER - self.assertEqual(query.project, self._PROJECT) - self.assertEqual(query.kind, _KIND_AFTER) - - def test_ancestor_setter_w_non_key(self): - query = self._make_one(self._make_client()) - - def _assign(val): - query.ancestor = val - - self.assertRaises(TypeError, _assign, object()) - self.assertRaises(TypeError, _assign, ["KIND", "NAME"]) - - def test_ancestor_setter_w_key(self): - from google.cloud.datastore.key import Key - - _NAME = "NAME" - key = Key("KIND", 123, project=self._PROJECT) - query = self._make_one(self._make_client()) - query.add_filter("name", "=", _NAME) - query.ancestor = key - self.assertEqual(query.ancestor.path, key.path) - - def test_ancestor_deleter_w_key(self): - from google.cloud.datastore.key import Key - - key = Key("KIND", 123, project=self._PROJECT) - query = self._make_one(client=self._make_client(), ancestor=key) - del query.ancestor - self.assertIsNone(query.ancestor) - - def test_add_filter_setter_w_unknown_operator(self): - query = self._make_one(self._make_client()) - self.assertRaises(ValueError, query.add_filter, "firstname", "~~", "John") - - def test_add_filter_w_known_operator(self): - query = self._make_one(self._make_client()) - query.add_filter("firstname", "=", "John") - self.assertEqual(query.filters, [("firstname", "=", "John")]) - - def test_add_filter_w_all_operators(self): - query = self._make_one(self._make_client()) - query.add_filter("leq_prop", "<=", "val1") - query.add_filter("geq_prop", ">=", "val2") - query.add_filter("lt_prop", "<", "val3") - query.add_filter("gt_prop", ">", "val4") - query.add_filter("eq_prop", "=", "val5") - self.assertEqual(len(query.filters), 5) - 
self.assertEqual(query.filters[0], ("leq_prop", "<=", "val1")) - self.assertEqual(query.filters[1], ("geq_prop", ">=", "val2")) - self.assertEqual(query.filters[2], ("lt_prop", "<", "val3")) - self.assertEqual(query.filters[3], ("gt_prop", ">", "val4")) - self.assertEqual(query.filters[4], ("eq_prop", "=", "val5")) - - def test_add_filter_w_known_operator_and_entity(self): - from google.cloud.datastore.entity import Entity - - query = self._make_one(self._make_client()) - other = Entity() - other["firstname"] = "John" - other["lastname"] = "Smith" - query.add_filter("other", "=", other) - self.assertEqual(query.filters, [("other", "=", other)]) - - def test_add_filter_w_whitespace_property_name(self): - query = self._make_one(self._make_client()) - PROPERTY_NAME = " property with lots of space " - query.add_filter(PROPERTY_NAME, "=", "John") - self.assertEqual(query.filters, [(PROPERTY_NAME, "=", "John")]) - - def test_add_filter___key__valid_key(self): - from google.cloud.datastore.key import Key - - query = self._make_one(self._make_client()) - key = Key("Foo", project=self._PROJECT) - query.add_filter("__key__", "=", key) - self.assertEqual(query.filters, [("__key__", "=", key)]) - - def test_add_filter_return_query_obj(self): - from google.cloud.datastore.query import Query - - query = self._make_one(self._make_client()) - query_obj = query.add_filter("firstname", "=", "John") - self.assertIsInstance(query_obj, Query) - self.assertEqual(query_obj.filters, [("firstname", "=", "John")]) - - def test_filter___key__not_equal_operator(self): - from google.cloud.datastore.key import Key - - key = Key("Foo", project=self._PROJECT) - query = self._make_one(self._make_client()) - query.add_filter("__key__", "<", key) - self.assertEqual(query.filters, [("__key__", "<", key)]) - - def test_filter___key__invalid_value(self): - query = self._make_one(self._make_client()) - self.assertRaises(ValueError, query.add_filter, "__key__", "=", None) - - def test_projection_setter_empty(self): - query = self._make_one(self._make_client()) - query.projection = [] - self.assertEqual(query.projection, []) - - def test_projection_setter_string(self): - query = self._make_one(self._make_client()) - query.projection = "field1" - self.assertEqual(query.projection, ["field1"]) - - def test_projection_setter_non_empty(self): - query = self._make_one(self._make_client()) - query.projection = ["field1", "field2"] - self.assertEqual(query.projection, ["field1", "field2"]) - - def test_projection_setter_multiple_calls(self): - _PROJECTION1 = ["field1", "field2"] - _PROJECTION2 = ["field3"] - query = self._make_one(self._make_client()) - query.projection = _PROJECTION1 - self.assertEqual(query.projection, _PROJECTION1) - query.projection = _PROJECTION2 - self.assertEqual(query.projection, _PROJECTION2) - - def test_keys_only(self): - query = self._make_one(self._make_client()) - query.keys_only() - self.assertEqual(query.projection, ["__key__"]) - - def test_key_filter_defaults(self): - from google.cloud.datastore.key import Key - - client = self._make_client() - query = self._make_one(client) - self.assertEqual(query.filters, []) - key = Key("Kind", 1234, project="project") - query.key_filter(key) - self.assertEqual(query.filters, [("__key__", "=", key)]) - - def test_key_filter_explicit(self): - from google.cloud.datastore.key import Key - - client = self._make_client() - query = self._make_one(client) - self.assertEqual(query.filters, []) - key = Key("Kind", 1234, project="project") - query.key_filter(key, 
operator=">") - self.assertEqual(query.filters, [("__key__", ">", key)]) - - def test_order_setter_empty(self): - query = self._make_one(self._make_client(), order=["foo", "-bar"]) - query.order = [] - self.assertEqual(query.order, []) - - def test_order_setter_string(self): - query = self._make_one(self._make_client()) - query.order = "field" - self.assertEqual(query.order, ["field"]) - - def test_order_setter_single_item_list_desc(self): - query = self._make_one(self._make_client()) - query.order = ["-field"] - self.assertEqual(query.order, ["-field"]) - - def test_order_setter_multiple(self): - query = self._make_one(self._make_client()) - query.order = ["foo", "-bar"] - self.assertEqual(query.order, ["foo", "-bar"]) - - def test_distinct_on_setter_empty(self): - query = self._make_one(self._make_client(), distinct_on=["foo", "bar"]) - query.distinct_on = [] - self.assertEqual(query.distinct_on, []) - - def test_distinct_on_setter_string(self): - query = self._make_one(self._make_client()) - query.distinct_on = "field1" - self.assertEqual(query.distinct_on, ["field1"]) - - def test_distinct_on_setter_non_empty(self): - query = self._make_one(self._make_client()) - query.distinct_on = ["field1", "field2"] - self.assertEqual(query.distinct_on, ["field1", "field2"]) - - def test_distinct_on_multiple_calls(self): - _DISTINCT_ON1 = ["field1", "field2"] - _DISTINCT_ON2 = ["field3"] - query = self._make_one(self._make_client()) - query.distinct_on = _DISTINCT_ON1 - self.assertEqual(query.distinct_on, _DISTINCT_ON1) - query.distinct_on = _DISTINCT_ON2 - self.assertEqual(query.distinct_on, _DISTINCT_ON2) - - def test_fetch_defaults_w_client_attr(self): - from google.cloud.datastore.query import Iterator - - client = self._make_client() - query = self._make_one(client) - - iterator = query.fetch() - - self.assertIsInstance(iterator, Iterator) - self.assertIs(iterator._query, query) - self.assertIs(iterator.client, client) - self.assertIsNone(iterator.max_results) - self.assertEqual(iterator._offset, 0) - self.assertIsNone(iterator._retry) - self.assertIsNone(iterator._timeout) - - def test_fetch_w_explicit_client_w_retry_w_timeout(self): - from google.cloud.datastore.query import Iterator - - client = self._make_client() - other_client = self._make_client() - query = self._make_one(client) - retry = mock.Mock() - timeout = 100000 - - iterator = query.fetch( - limit=7, offset=8, client=other_client, retry=retry, timeout=timeout - ) - self.assertIsInstance(iterator, Iterator) - self.assertIs(iterator._query, query) - self.assertIs(iterator.client, other_client) - self.assertEqual(iterator.max_results, 7) - self.assertEqual(iterator._offset, 8) - self.assertEqual(iterator._retry, retry) - self.assertEqual(iterator._timeout, timeout) - - -class TestIterator(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.datastore.query import Iterator - - return Iterator - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_defaults(self): - query = object() - client = object() - - iterator = self._make_one(query, client) - - self.assertFalse(iterator._started) - self.assertIs(iterator.client, client) - self.assertIsNone(iterator.max_results) - self.assertEqual(iterator.page_number, 0) - self.assertIsNone(iterator.next_page_token) - self.assertEqual(iterator.num_results, 0) - self.assertIs(iterator._query, query) - self.assertIsNone(iterator._offset) - self.assertIsNone(iterator._end_cursor) - 
self.assertTrue(iterator._more_results) - self.assertIsNone(iterator._retry) - self.assertIsNone(iterator._timeout) - - def test_constructor_explicit(self): - query = object() - client = object() - limit = 43 - offset = 9 - start_cursor = b"8290\xff" - end_cursor = b"so20rc\ta" - retry = mock.Mock() - timeout = 100000 - - iterator = self._make_one( - query, - client, - limit=limit, - offset=offset, - start_cursor=start_cursor, - end_cursor=end_cursor, - retry=retry, - timeout=timeout, - ) +def test_query_ctor_bad_distinct_on(): + BAD_DISTINCT_ON = object() + with pytest.raises(TypeError): + _make_query(_make_client(), distinct_on=BAD_DISTINCT_ON) - self.assertFalse(iterator._started) - self.assertIs(iterator.client, client) - self.assertEqual(iterator.max_results, limit) - self.assertEqual(iterator.page_number, 0) - self.assertEqual(iterator.next_page_token, start_cursor) - self.assertEqual(iterator.num_results, 0) - self.assertIs(iterator._query, query) - self.assertEqual(iterator._offset, offset) - self.assertEqual(iterator._end_cursor, end_cursor) - self.assertTrue(iterator._more_results) - self.assertEqual(iterator._retry, retry) - self.assertEqual(iterator._timeout, timeout) - - def test__build_protobuf_empty(self): - from google.cloud.datastore_v1.types import query as query_pb2 - from google.cloud.datastore.query import Query - - client = _Client(None) - query = Query(client) - iterator = self._make_one(query, client) - - pb = iterator._build_protobuf() - expected_pb = query_pb2.Query() - self.assertEqual(pb, expected_pb) - - def test__build_protobuf_all_values_except_offset(self): - # this test and the following (all_values_except_start_and_end_cursor) - # test mutually exclusive states; the offset is ignored - # if a start_cursor is supplied - from google.cloud.datastore_v1.types import query as query_pb2 - from google.cloud.datastore.query import Query - - client = _Client(None) - query = Query(client) - limit = 15 - start_bytes = b"i\xb7\x1d" - start_cursor = "abcd" - end_bytes = b"\xc3\x1c\xb3" - end_cursor = "wxyz" - iterator = self._make_one( - query, client, limit=limit, start_cursor=start_cursor, end_cursor=end_cursor - ) - self.assertEqual(iterator.max_results, limit) - iterator.num_results = 4 - iterator._skipped_results = 1 - - pb = iterator._build_protobuf() - expected_pb = query_pb2.Query(start_cursor=start_bytes, end_cursor=end_bytes) - expected_pb._pb.limit.value = limit - iterator.num_results - self.assertEqual(pb, expected_pb) - - def test__build_protobuf_all_values_except_start_and_end_cursor(self): - # this test and the previous (all_values_except_start_offset) - # test mutually exclusive states; the offset is ignored - # if a start_cursor is supplied - from google.cloud.datastore_v1.types import query as query_pb2 - from google.cloud.datastore.query import Query - - client = _Client(None) - query = Query(client) - limit = 15 - offset = 9 - iterator = self._make_one(query, client, limit=limit, offset=offset) - self.assertEqual(iterator.max_results, limit) - iterator.num_results = 4 - - pb = iterator._build_protobuf() - expected_pb = query_pb2.Query(offset=offset - iterator._skipped_results) - expected_pb._pb.limit.value = limit - iterator.num_results - self.assertEqual(pb, expected_pb) - - def test__process_query_results(self): - from google.cloud.datastore_v1.types import query as query_pb2 - - iterator = self._make_one(None, None, end_cursor="abcd") - self.assertIsNotNone(iterator._end_cursor) - - entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT")] - 
cursor_as_bytes = b"\x9ai\xe7" - cursor = b"mmnn" - skipped_results = 4 - more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED - response_pb = _make_query_response( - entity_pbs, cursor_as_bytes, more_results_enum, skipped_results - ) - result = iterator._process_query_results(response_pb) - self.assertEqual(result, entity_pbs) - self.assertEqual(iterator._skipped_results, skipped_results) - self.assertEqual(iterator.next_page_token, cursor) - self.assertTrue(iterator._more_results) +def test_query_ctor_bad_filters(): + FILTERS_CANT_UNPACK = [("one", "two")] + with pytest.raises(ValueError): + _make_query(_make_client(), filters=FILTERS_CANT_UNPACK) - def test__process_query_results_done(self): - from google.cloud.datastore_v1.types import query as query_pb2 - iterator = self._make_one(None, None, end_cursor="abcd") - self.assertIsNotNone(iterator._end_cursor) +def test_query_namespace_setter_w_non_string(): + query = _make_query(_make_client()) + with pytest.raises(ValueError): + query.namespace = object() - entity_pbs = [_make_entity("World", 1234, "PROJECT")] - cursor_as_bytes = b"\x9ai\xe7" - skipped_results = 44 - more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS - response_pb = _make_query_response( - entity_pbs, cursor_as_bytes, more_results_enum, skipped_results - ) - result = iterator._process_query_results(response_pb) - self.assertEqual(result, entity_pbs) - - self.assertEqual(iterator._skipped_results, skipped_results) - self.assertIsNone(iterator.next_page_token) - self.assertFalse(iterator._more_results) - - @pytest.mark.filterwarnings("ignore") - def test__process_query_results_bad_enum(self): - iterator = self._make_one(None, None) - more_results_enum = 999 - response_pb = _make_query_response([], b"", more_results_enum, 0) - with self.assertRaises(ValueError): - iterator._process_query_results(response_pb) - - def _next_page_helper(self, txn_id=None, retry=None, timeout=None): - from google.api_core import page_iterator - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore_v1.types import query as query_pb2 - from google.cloud.datastore.query import Query - - more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED - result = _make_query_response([], b"", more_enum, 0) - project = "prujekt" - ds_api = _make_datastore_api(result) - if txn_id is None: - client = _Client(project, datastore_api=ds_api) - else: - transaction = mock.Mock(id=txn_id, spec=["id"]) - client = _Client(project, datastore_api=ds_api, transaction=transaction) - - query = Query(client) - kwargs = {} - - if retry is not None: - kwargs["retry"] = retry - - if timeout is not None: - kwargs["timeout"] = timeout - - iterator = self._make_one(query, client, **kwargs) - - page = iterator._next_page() - - self.assertIsInstance(page, page_iterator.Page) - self.assertIs(page._parent, iterator) - - partition_id = entity_pb2.PartitionId(project_id=project) - if txn_id is None: - read_options = datastore_pb2.ReadOptions() - else: - read_options = datastore_pb2.ReadOptions(transaction=txn_id) - empty_query = query_pb2.Query() - ds_api.run_query.assert_called_once_with( + +def test_query_namespace_setter(): + _NAMESPACE = "OTHER_NAMESPACE" + query = _make_query(_make_client()) + query.namespace = _NAMESPACE + assert query.namespace == _NAMESPACE + + +def test_query_kind_setter_w_non_string(): + query = _make_query(_make_client()) + with 
pytest.raises(TypeError): + query.kind = object() + + +def test_query_kind_setter_wo_existing(): + _KIND = "KIND" + query = _make_query(_make_client()) + query.kind = _KIND + assert query.kind == _KIND + + +def test_query_kind_setter_w_existing(): + _KIND_BEFORE = "KIND_BEFORE" + _KIND_AFTER = "KIND_AFTER" + query = _make_query(_make_client(), kind=_KIND_BEFORE) + assert query.kind == _KIND_BEFORE + query.kind = _KIND_AFTER + assert query.project == _PROJECT + assert query.kind == _KIND_AFTER + + +def test_query_ancestor_setter_w_non_key(): + query = _make_query(_make_client()) + + with pytest.raises(TypeError): + query.ancestor = object() + + with pytest.raises(TypeError): + query.ancestor = ["KIND", "NAME"] + + +def test_query_ancestor_setter_w_key(): + from google.cloud.datastore.key import Key + + _NAME = "NAME" + key = Key("KIND", 123, project=_PROJECT) + query = _make_query(_make_client()) + query.add_filter("name", "=", _NAME) + query.ancestor = key + assert query.ancestor.path == key.path + + +def test_query_ancestor_deleter_w_key(): + from google.cloud.datastore.key import Key + + key = Key("KIND", 123, project=_PROJECT) + query = _make_query(client=_make_client(), ancestor=key) + del query.ancestor + assert query.ancestor is None + + +def test_query_add_filter_setter_w_unknown_operator(): + query = _make_query(_make_client()) + with pytest.raises(ValueError): + query.add_filter("firstname", "~~", "John") + + +def test_query_add_filter_w_known_operator(): + query = _make_query(_make_client()) + query.add_filter("firstname", "=", "John") + assert query.filters == [("firstname", "=", "John")] + + +def test_query_add_filter_w_all_operators(): + query = _make_query(_make_client()) + query.add_filter("leq_prop", "<=", "val1") + query.add_filter("geq_prop", ">=", "val2") + query.add_filter("lt_prop", "<", "val3") + query.add_filter("gt_prop", ">", "val4") + query.add_filter("eq_prop", "=", "val5") + assert len(query.filters) == 5 + assert query.filters[0] == ("leq_prop", "<=", "val1") + assert query.filters[1] == ("geq_prop", ">=", "val2") + assert query.filters[2] == ("lt_prop", "<", "val3") + assert query.filters[3] == ("gt_prop", ">", "val4") + assert query.filters[4] == ("eq_prop", "=", "val5") + + +def test_query_add_filter_w_known_operator_and_entity(): + from google.cloud.datastore.entity import Entity + + query = _make_query(_make_client()) + other = Entity() + other["firstname"] = "John" + other["lastname"] = "Smith" + query.add_filter("other", "=", other) + assert query.filters == [("other", "=", other)] + + +def test_query_add_filter_w_whitespace_property_name(): + query = _make_query(_make_client()) + PROPERTY_NAME = " property with lots of space " + query.add_filter(PROPERTY_NAME, "=", "John") + assert query.filters == [(PROPERTY_NAME, "=", "John")] + + +def test_query_add_filter___key__valid_key(): + from google.cloud.datastore.key import Key + + query = _make_query(_make_client()) + key = Key("Foo", project=_PROJECT) + query.add_filter("__key__", "=", key) + assert query.filters == [("__key__", "=", key)] + + +def test_query_add_filter_return_query_obj(): + from google.cloud.datastore.query import Query + + query = _make_query(_make_client()) + query_obj = query.add_filter("firstname", "=", "John") + assert isinstance(query_obj, Query) + assert query_obj.filters == [("firstname", "=", "John")] + + +def test_query_filter___key__not_equal_operator(): + from google.cloud.datastore.key import Key + + key = Key("Foo", project=_PROJECT) + query = _make_query(_make_client()) + 
query.add_filter("__key__", "<", key) + assert query.filters == [("__key__", "<", key)] + + +def test_query_filter___key__invalid_value(): + query = _make_query(_make_client()) + with pytest.raises(ValueError): + query.add_filter("__key__", "=", None) + + +def test_query_projection_setter_empty(): + query = _make_query(_make_client()) + query.projection = [] + assert query.projection == [] + + +def test_query_projection_setter_string(): + query = _make_query(_make_client()) + query.projection = "field1" + assert query.projection == ["field1"] + + +def test_query_projection_setter_non_empty(): + query = _make_query(_make_client()) + query.projection = ["field1", "field2"] + assert query.projection == ["field1", "field2"] + + +def test_query_projection_setter_multiple_calls(): + _PROJECTION1 = ["field1", "field2"] + _PROJECTION2 = ["field3"] + query = _make_query(_make_client()) + query.projection = _PROJECTION1 + assert query.projection == _PROJECTION1 + query.projection = _PROJECTION2 + assert query.projection == _PROJECTION2 + + +def test_query_keys_only(): + query = _make_query(_make_client()) + query.keys_only() + assert query.projection == ["__key__"] + + +def test_query_key_filter_defaults(): + from google.cloud.datastore.key import Key + + client = _make_client() + query = _make_query(client) + assert query.filters == [] + key = Key("Kind", 1234, project="project") + query.key_filter(key) + assert query.filters == [("__key__", "=", key)] + + +def test_query_key_filter_explicit(): + from google.cloud.datastore.key import Key + + client = _make_client() + query = _make_query(client) + assert query.filters == [] + key = Key("Kind", 1234, project="project") + query.key_filter(key, operator=">") + assert query.filters == [("__key__", ">", key)] + + +def test_query_order_setter_empty(): + query = _make_query(_make_client(), order=["foo", "-bar"]) + query.order = [] + assert query.order == [] + + +def test_query_order_setter_string(): + query = _make_query(_make_client()) + query.order = "field" + assert query.order == ["field"] + + +def test_query_order_setter_single_item_list_desc(): + query = _make_query(_make_client()) + query.order = ["-field"] + assert query.order == ["-field"] + + +def test_query_order_setter_multiple(): + query = _make_query(_make_client()) + query.order = ["foo", "-bar"] + assert query.order == ["foo", "-bar"] + + +def test_query_distinct_on_setter_empty(): + query = _make_query(_make_client(), distinct_on=["foo", "bar"]) + query.distinct_on = [] + assert query.distinct_on == [] + + +def test_query_distinct_on_setter_string(): + query = _make_query(_make_client()) + query.distinct_on = "field1" + assert query.distinct_on == ["field1"] + + +def test_query_distinct_on_setter_non_empty(): + query = _make_query(_make_client()) + query.distinct_on = ["field1", "field2"] + assert query.distinct_on == ["field1", "field2"] + + +def test_query_distinct_on_multiple_calls(): + _DISTINCT_ON1 = ["field1", "field2"] + _DISTINCT_ON2 = ["field3"] + query = _make_query(_make_client()) + query.distinct_on = _DISTINCT_ON1 + assert query.distinct_on == _DISTINCT_ON1 + query.distinct_on = _DISTINCT_ON2 + assert query.distinct_on == _DISTINCT_ON2 + + +def test_query_fetch_defaults_w_client_attr(): + from google.cloud.datastore.query import Iterator + + client = _make_client() + query = _make_query(client) + + iterator = query.fetch() + + assert isinstance(iterator, Iterator) + assert iterator._query is query + assert iterator.client is client + assert iterator.max_results is None + 
assert iterator._offset == 0
+    assert iterator._retry is None
+    assert iterator._timeout is None
+
+
+def test_query_fetch_w_explicit_client_w_retry_w_timeout():
+    from google.cloud.datastore.query import Iterator
+
+    client = _make_client()
+    other_client = _make_client()
+    query = _make_query(client)
+    retry = mock.Mock()
+    timeout = 100000
+
+    iterator = query.fetch(
+        limit=7, offset=8, client=other_client, retry=retry, timeout=timeout
+    )
+
+    assert isinstance(iterator, Iterator)
+    assert iterator._query is query
+    assert iterator.client is other_client
+    assert iterator.max_results == 7
+    assert iterator._offset == 8
+    assert iterator._retry == retry
+    assert iterator._timeout == timeout
+
+
+def test_iterator_constructor_defaults():
+    query = object()
+    client = object()
+
+    iterator = _make_iterator(query, client)
+
+    assert not iterator._started
+    assert iterator.client is client
+    assert iterator.max_results is None
+    assert iterator.page_number == 0
+    assert iterator.next_page_token is None
+    assert iterator.num_results == 0
+    assert iterator._query is query
+    assert iterator._offset is None
+    assert iterator._end_cursor is None
+    assert iterator._more_results
+    assert iterator._retry is None
+    assert iterator._timeout is None
+
+
+def test_iterator_constructor_explicit():
+    query = object()
+    client = object()
+    limit = 43
+    offset = 9
+    start_cursor = b"8290\xff"
+    end_cursor = b"so20rc\ta"
+    retry = mock.Mock()
+    timeout = 100000
+
+    iterator = _make_iterator(
+        query,
+        client,
+        limit=limit,
+        offset=offset,
+        start_cursor=start_cursor,
+        end_cursor=end_cursor,
+        retry=retry,
+        timeout=timeout,
+    )
+
+    assert not iterator._started
+    assert iterator.client is client
+    assert iterator.max_results == limit
+    assert iterator.page_number == 0
+    assert iterator.next_page_token == start_cursor
+    assert iterator.num_results == 0
+    assert iterator._query is query
+    assert iterator._offset == offset
+    assert iterator._end_cursor == end_cursor
+    assert iterator._more_results
+    assert iterator._retry == retry
+    assert iterator._timeout == timeout
+
+
+def test_iterator__build_protobuf_empty():
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    client = _Client(None)
+    query = Query(client)
+    iterator = _make_iterator(query, client)
+
+    pb = iterator._build_protobuf()
+    expected_pb = query_pb2.Query()
+    assert pb == expected_pb
+
+
+def test_iterator__build_protobuf_all_values_except_offset():
+    # this test and the following (all_values_except_start_and_end_cursor)
+    # test mutually exclusive states; the offset is ignored
+    # if a start_cursor is supplied
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    client = _Client(None)
+    query = Query(client)
+    limit = 15
+    start_bytes = b"i\xb7\x1d"
+    start_cursor = "abcd"
+    end_bytes = b"\xc3\x1c\xb3"
+    end_cursor = "wxyz"
+    iterator = _make_iterator(
+        query, client, limit=limit, start_cursor=start_cursor, end_cursor=end_cursor
+    )
+    assert iterator.max_results == limit
+    iterator.num_results = 4
+    iterator._skipped_results = 1
+
+    pb = iterator._build_protobuf()
+    expected_pb = query_pb2.Query(start_cursor=start_bytes, end_cursor=end_bytes)
+    expected_pb._pb.limit.value = limit - iterator.num_results
+    assert pb == expected_pb
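[Editor's note: the cursor fixtures above are not arbitrary. The test passes string cursors and expects raw bytes on the protobuf, which implies the iterator urlsafe-base64-decodes them; the expected limit is the remaining result count (limit - num_results). The fixture pairing can be checked with the standard library alone:

    import base64

    # "abcd" / "wxyz" are the urlsafe-base64 encodings of the expected
    # start_cursor / end_cursor bytes used in the test above.
    assert base64.urlsafe_b64decode("abcd") == b"i\xb7\x1d"
    assert base64.urlsafe_b64decode("wxyz") == b"\xc3\x1c\xb3"
]

+
+
+def test_iterator__build_protobuf_all_values_except_start_and_end_cursor():
+    # this test and the previous (all_values_except_offset)
+    # test mutually exclusive states; the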
offset is ignored + # if a start_cursor is supplied + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import Query + + client = _Client(None) + query = Query(client) + limit = 15 + offset = 9 + iterator = _make_iterator(query, client, limit=limit, offset=offset) + assert iterator.max_results == limit + iterator.num_results = 4 + + pb = iterator._build_protobuf() + expected_pb = query_pb2.Query(offset=offset - iterator._skipped_results) + expected_pb._pb.limit.value = limit - iterator.num_results + assert pb == expected_pb + + +def test_iterator__process_query_results(): + from google.cloud.datastore_v1.types import query as query_pb2 + + iterator = _make_iterator(None, None, end_cursor="abcd") + assert iterator._end_cursor is not None + + entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT")] + cursor_as_bytes = b"\x9ai\xe7" + cursor = b"mmnn" + skipped_results = 4 + more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + response_pb = _make_query_response( + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results + ) + result = iterator._process_query_results(response_pb) + assert result == entity_pbs + + assert iterator._skipped_results == skipped_results + assert iterator.next_page_token == cursor + assert iterator._more_results + + +def test_iterator__process_query_results_done(): + from google.cloud.datastore_v1.types import query as query_pb2 + + iterator = _make_iterator(None, None, end_cursor="abcd") + assert iterator._end_cursor is not None + + entity_pbs = [_make_entity("World", 1234, "PROJECT")] + cursor_as_bytes = b"\x9ai\xe7" + skipped_results = 44 + more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS + response_pb = _make_query_response( + entity_pbs, cursor_as_bytes, more_results_enum, skipped_results + ) + result = iterator._process_query_results(response_pb) + assert result == entity_pbs + + assert iterator._skipped_results == skipped_results + assert iterator.next_page_token is None + assert not iterator._more_results + + +@pytest.mark.filterwarnings("ignore") +def test_iterator__process_query_results_bad_enum(): + iterator = _make_iterator(None, None) + more_results_enum = 999 + response_pb = _make_query_response([], b"", more_results_enum, 0) + with pytest.raises(ValueError): + iterator._process_query_results(response_pb) + + +def _next_page_helper(txn_id=None, retry=None, timeout=None): + from google.api_core import page_iterator + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import Query + + more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + result = _make_query_response([], b"", more_enum, 0) + project = "prujekt" + ds_api = _make_datastore_api(result) + if txn_id is None: + client = _Client(project, datastore_api=ds_api) + else: + transaction = mock.Mock(id=txn_id, spec=["id"]) + client = _Client(project, datastore_api=ds_api, transaction=transaction) + + query = Query(client) + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + + if timeout is not None: + kwargs["timeout"] = timeout + + iterator = _make_iterator(query, client, **kwargs) + + page = iterator._next_page() + + assert isinstance(page, page_iterator.Page) + assert page._parent is iterator + + partition_id = entity_pb2.PartitionId(project_id=project) + if txn_id is None: + 
read_options = datastore_pb2.ReadOptions() + else: + read_options = datastore_pb2.ReadOptions(transaction=txn_id) + empty_query = query_pb2.Query() + ds_api.run_query.assert_called_once_with( + request={ + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "query": empty_query, + }, + **kwargs, + ) + + +def test_iterator__next_page(): + _next_page_helper() + + +def test_iterator__next_page_w_retry(): + _next_page_helper(retry=mock.Mock()) + + +def test_iterator__next_page_w_timeout(): + _next_page_helper(timeout=100000) + + +def test_iterator__next_page_in_transaction(): + txn_id = b"1xo1md\xe2\x98\x83" + _next_page_helper(txn_id) + + +def test_iterator__next_page_no_more(): + from google.cloud.datastore.query import Query + + ds_api = _make_datastore_api() + client = _Client(None, datastore_api=ds_api) + query = Query(client) + iterator = _make_iterator(query, client) + iterator._more_results = False + + page = iterator._next_page() + assert page is None + ds_api.run_query.assert_not_called() + + +def test_iterator__next_page_w_skipped_lt_offset(): + from google.api_core import page_iterator + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import Query + + project = "prujekt" + skipped_1 = 100 + skipped_cursor_1 = b"DEADBEEF" + skipped_2 = 50 + skipped_cursor_2 = b"FACEDACE" + + more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + + result_1 = _make_query_response([], b"", more_enum, skipped_1) + result_1.batch.skipped_cursor = skipped_cursor_1 + result_2 = _make_query_response([], b"", more_enum, skipped_2) + result_2.batch.skipped_cursor = skipped_cursor_2 + + ds_api = _make_datastore_api(result_1, result_2) + client = _Client(project, datastore_api=ds_api) + + query = Query(client) + offset = 150 + iterator = _make_iterator(query, client, offset=offset) + + page = iterator._next_page() + + assert isinstance(page, page_iterator.Page) + assert page._parent is iterator + + partition_id = entity_pb2.PartitionId(project_id=project) + read_options = datastore_pb2.ReadOptions() + + query_1 = query_pb2.Query(offset=offset) + query_2 = query_pb2.Query( + start_cursor=skipped_cursor_1, offset=(offset - skipped_1) + ) + expected_calls = [ + mock.call( request={ "project_id": project, "partition_id": partition_id, "read_options": read_options, - "query": empty_query, - }, - **kwargs, + "query": query, + } ) + for query in [query_1, query_2] + ] + assert ds_api.run_query.call_args_list == expected_calls - def test__next_page(self): - self._next_page_helper() - def test__next_page_w_retry(self): - self._next_page_helper(retry=mock.Mock()) +def test__item_to_entity(): + from google.cloud.datastore.query import _item_to_entity - def test__next_page_w_timeout(self): - self._next_page_helper(timeout=100000) + entity_pb = mock.Mock() + entity_pb._pb = mock.sentinel.entity_pb + patch = mock.patch("google.cloud.datastore.helpers.entity_from_protobuf") + with patch as entity_from_protobuf: + result = _item_to_entity(None, entity_pb) + assert result is entity_from_protobuf.return_value - def test__next_page_in_transaction(self): - txn_id = b"1xo1md\xe2\x98\x83" - self._next_page_helper(txn_id) + entity_from_protobuf.assert_called_once_with(entity_pb) - def test__next_page_no_more(self): - from google.cloud.datastore.query import Query - ds_api = 
_make_datastore_api() - client = _Client(None, datastore_api=ds_api) - query = Query(client) - iterator = self._make_one(query, client) - iterator._more_results = False +def test_pb_from_query_empty(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import _pb_from_query - page = iterator._next_page() - self.assertIsNone(page) - ds_api.run_query.assert_not_called() + pb = _pb_from_query(_Query()) + assert list(pb.projection) == [] + assert list(pb.kind) == [] + assert list(pb.order) == [] + assert list(pb.distinct_on) == [] + assert pb.filter.property_filter.property.name == "" + cfilter = pb.filter.composite_filter + assert cfilter.op == query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED + assert list(cfilter.filters) == [] + assert pb.start_cursor == b"" + assert pb.end_cursor == b"" + assert pb._pb.limit.value == 0 + assert pb.offset == 0 - def test__next_page_w_skipped_lt_offset(self): - from google.api_core import page_iterator - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - from google.cloud.datastore_v1.types import entity as entity_pb2 - from google.cloud.datastore_v1.types import query as query_pb2 - from google.cloud.datastore.query import Query - project = "prujekt" - skipped_1 = 100 - skipped_cursor_1 = b"DEADBEEF" - skipped_2 = 50 - skipped_cursor_2 = b"FACEDACE" +def test_pb_from_query_projection(): + from google.cloud.datastore.query import _pb_from_query - more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + pb = _pb_from_query(_Query(projection=["a", "b", "c"])) + assert [item.property.name for item in pb.projection] == ["a", "b", "c"] - result_1 = _make_query_response([], b"", more_enum, skipped_1) - result_1.batch.skipped_cursor = skipped_cursor_1 - result_2 = _make_query_response([], b"", more_enum, skipped_2) - result_2.batch.skipped_cursor = skipped_cursor_2 - ds_api = _make_datastore_api(result_1, result_2) - client = _Client(project, datastore_api=ds_api) +def test_pb_from_query_kind(): + from google.cloud.datastore.query import _pb_from_query - query = Query(client) - offset = 150 - iterator = self._make_one(query, client, offset=offset) + pb = _pb_from_query(_Query(kind="KIND")) + assert [item.name for item in pb.kind] == ["KIND"] - page = iterator._next_page() - self.assertIsInstance(page, page_iterator.Page) - self.assertIs(page._parent, iterator) +def test_pb_from_query_ancestor(): + from google.cloud.datastore.key import Key + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import _pb_from_query - partition_id = entity_pb2.PartitionId(project_id=project) - read_options = datastore_pb2.ReadOptions() + ancestor = Key("Ancestor", 123, project="PROJECT") + pb = _pb_from_query(_Query(ancestor=ancestor)) + cfilter = pb.filter.composite_filter + assert cfilter.op == query_pb2.CompositeFilter.Operator.AND + assert len(cfilter.filters) == 1 + pfilter = cfilter.filters[0].property_filter + assert pfilter.property.name == "__key__" + ancestor_pb = ancestor.to_protobuf() + assert pfilter.value.key_value == ancestor_pb - query_1 = query_pb2.Query(offset=offset) - query_2 = query_pb2.Query( - start_cursor=skipped_cursor_1, offset=(offset - skipped_1) - ) - expected_calls = [ - mock.call( - request={ - "project_id": project, - "partition_id": partition_id, - "read_options": read_options, - "query": query, - } - ) - for query in [query_1, query_2] - ] - self.assertEqual(ds_api.run_query.call_args_list, expected_calls) - - 
-class Test__item_to_entity(unittest.TestCase): - def _call_fut(self, iterator, entity_pb): - from google.cloud.datastore.query import _item_to_entity - - return _item_to_entity(iterator, entity_pb) - - def test_it(self): - entity_pb = mock.Mock() - entity_pb._pb = mock.sentinel.entity_pb - patch = mock.patch("google.cloud.datastore.helpers.entity_from_protobuf") - with patch as entity_from_protobuf: - result = self._call_fut(None, entity_pb) - self.assertIs(result, entity_from_protobuf.return_value) - - entity_from_protobuf.assert_called_once_with(entity_pb) - - -class Test__pb_from_query(unittest.TestCase): - def _call_fut(self, query): - from google.cloud.datastore.query import _pb_from_query - - return _pb_from_query(query) - - def test_empty(self): - from google.cloud.datastore_v1.types import query as query_pb2 - - pb = self._call_fut(_Query()) - self.assertEqual(list(pb.projection), []) - self.assertEqual(list(pb.kind), []) - self.assertEqual(list(pb.order), []) - self.assertEqual(list(pb.distinct_on), []) - self.assertEqual(pb.filter.property_filter.property.name, "") - cfilter = pb.filter.composite_filter - self.assertEqual( - cfilter.op, query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED - ) - self.assertEqual(list(cfilter.filters), []) - self.assertEqual(pb.start_cursor, b"") - self.assertEqual(pb.end_cursor, b"") - self.assertEqual(pb._pb.limit.value, 0) - self.assertEqual(pb.offset, 0) - - def test_projection(self): - pb = self._call_fut(_Query(projection=["a", "b", "c"])) - self.assertEqual( - [item.property.name for item in pb.projection], ["a", "b", "c"] - ) - def test_kind(self): - pb = self._call_fut(_Query(kind="KIND")) - self.assertEqual([item.name for item in pb.kind], ["KIND"]) - - def test_ancestor(self): - from google.cloud.datastore.key import Key - from google.cloud.datastore_v1.types import query as query_pb2 - - ancestor = Key("Ancestor", 123, project="PROJECT") - pb = self._call_fut(_Query(ancestor=ancestor)) - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND) - self.assertEqual(len(cfilter.filters), 1) - pfilter = cfilter.filters[0].property_filter - self.assertEqual(pfilter.property.name, "__key__") - ancestor_pb = ancestor.to_protobuf() - self.assertEqual(pfilter.value.key_value, ancestor_pb) - - def test_filter(self): - from google.cloud.datastore_v1.types import query as query_pb2 - - query = _Query(filters=[("name", "=", "John")]) - query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} - pb = self._call_fut(query) - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND) - self.assertEqual(len(cfilter.filters), 1) - pfilter = cfilter.filters[0].property_filter - self.assertEqual(pfilter.property.name, "name") - self.assertEqual(pfilter.value.string_value, "John") - - def test_filter_key(self): - from google.cloud.datastore.key import Key - from google.cloud.datastore_v1.types import query as query_pb2 - - key = Key("Kind", 123, project="PROJECT") - query = _Query(filters=[("__key__", "=", key)]) - query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} - pb = self._call_fut(query) - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND) - self.assertEqual(len(cfilter.filters), 1) - pfilter = cfilter.filters[0].property_filter - self.assertEqual(pfilter.property.name, "__key__") - key_pb = key.to_protobuf() - self.assertEqual(pfilter.value.key_value, key_pb) - - def 
test_order(self): - from google.cloud.datastore_v1.types import query as query_pb2 - - pb = self._call_fut(_Query(order=["a", "-b", "c"])) - self.assertEqual([item.property.name for item in pb.order], ["a", "b", "c"]) - self.assertEqual( - [item.direction for item in pb.order], - [ - query_pb2.PropertyOrder.Direction.ASCENDING, - query_pb2.PropertyOrder.Direction.DESCENDING, - query_pb2.PropertyOrder.Direction.ASCENDING, - ], - ) +def test_pb_from_query_filter(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import _pb_from_query + + query = _Query(filters=[("name", "=", "John")]) + query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} + pb = _pb_from_query(query) + cfilter = pb.filter.composite_filter + assert cfilter.op == query_pb2.CompositeFilter.Operator.AND + assert len(cfilter.filters) == 1 + pfilter = cfilter.filters[0].property_filter + assert pfilter.property.name == "name" + assert pfilter.value.string_value == "John" + + +def test_pb_from_query_filter_key(): + from google.cloud.datastore.key import Key + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import _pb_from_query + + key = Key("Kind", 123, project="PROJECT") + query = _Query(filters=[("__key__", "=", key)]) + query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} + pb = _pb_from_query(query) + cfilter = pb.filter.composite_filter + assert cfilter.op == query_pb2.CompositeFilter.Operator.AND + assert len(cfilter.filters) == 1 + pfilter = cfilter.filters[0].property_filter + assert pfilter.property.name == "__key__" + key_pb = key.to_protobuf() + assert pfilter.value.key_value == key_pb + + +def test_pb_from_query_order(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import _pb_from_query + + pb = _pb_from_query(_Query(order=["a", "-b", "c"])) + assert [item.property.name for item in pb.order] == ["a", "b", "c"] + expected_directions = [ + query_pb2.PropertyOrder.Direction.ASCENDING, + query_pb2.PropertyOrder.Direction.DESCENDING, + query_pb2.PropertyOrder.Direction.ASCENDING, + ] + assert [item.direction for item in pb.order] == expected_directions + - def test_distinct_on(self): - pb = self._call_fut(_Query(distinct_on=["a", "b", "c"])) - self.assertEqual([item.name for item in pb.distinct_on], ["a", "b", "c"]) +def test_pb_from_query_distinct_on(): + from google.cloud.datastore.query import _pb_from_query + + pb = _pb_from_query(_Query(distinct_on=["a", "b", "c"])) + assert [item.name for item in pb.distinct_on] == ["a", "b", "c"] class _Query(object): @@ -814,6 +835,22 @@ def current_transaction(self): return self._transaction +def _make_query(*args, **kw): + from google.cloud.datastore.query import Query + + return Query(*args, **kw) + + +def _make_iterator(*args, **kw): + from google.cloud.datastore.query import Iterator + + return Iterator(*args, **kw) + + +def _make_client(): + return _Client(_PROJECT) + + def _make_entity(kind, id_, project): from google.cloud.datastore_v1.types import entity as entity_pb2 diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index bae419dfdcd8..648ae7e450fe 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -12,360 +12,349 @@ # See the License for the specific language governing permissions and # limitations under 
the License. -import unittest - import mock +import pytest + +def test_transaction_ctor_defaults(): + from google.cloud.datastore.transaction import Transaction -class TestTransaction(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.datastore.transaction import Transaction + project = "PROJECT" + client = _Client(project) - return Transaction + xact = _make_transaction(client) - def _make_one(self, client, **kw): - return self._get_target_class()(client, **kw) + assert xact.project == project + assert xact._client is client + assert xact.id is None + assert xact._status == Transaction._INITIAL + assert xact._mutations == [] + assert len(xact._partial_key_entities) == 0 - def _make_options(self, read_only=False, previous_transaction=None): - from google.cloud.datastore_v1.types import TransactionOptions - kw = {} +def test_transaction_constructor_read_only(): + project = "PROJECT" + id_ = 850302 + ds_api = _make_datastore_api(xact=id_) + client = _Client(project, datastore_api=ds_api) + options = _make_options(read_only=True) - if read_only: - kw["read_only"] = TransactionOptions.ReadOnly() + xact = _make_transaction(client, read_only=True) - return TransactionOptions(**kw) + assert xact._options == options - def test_ctor_defaults(self): - project = "PROJECT" - client = _Client(project) - xact = self._make_one(client) +def test_transaction_current(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - self.assertEqual(xact.project, project) - self.assertIs(xact._client, client) - self.assertIsNone(xact.id) - self.assertEqual(xact._status, self._get_target_class()._INITIAL) - self.assertEqual(xact._mutations, []) - self.assertEqual(len(xact._partial_key_entities), 0) + project = "PROJECT" + id_ = 678 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact1 = _make_transaction(client) + xact2 = _make_transaction(client) + assert xact1.current() is None + assert xact2.current() is None - def test_constructor_read_only(self): - project = "PROJECT" - id_ = 850302 - ds_api = _make_datastore_api(xact=id_) - client = _Client(project, datastore_api=ds_api) - options = self._make_options(read_only=True) + with xact1: + assert xact1.current() is xact1 + assert xact2.current() is xact1 - xact = self._make_one(client, read_only=True) + with _NoCommitBatch(client): + assert xact1.current() is None + assert xact2.current() is None - self.assertEqual(xact._options, options) + with xact2: + assert xact1.current() is xact2 + assert xact2.current() is xact2 + + with _NoCommitBatch(client): + assert xact1.current() is None + assert xact2.current() is None - def _make_begin_request(self, project, read_only=False): - expected_options = self._make_options(read_only=read_only) - return { + assert xact1.current() is xact1 + assert xact2.current() is xact1 + + assert xact1.current() is None + assert xact2.current() is None + + begin_txn = ds_api.begin_transaction + assert begin_txn.call_count == 2 + expected_request = _make_begin_request(project) + begin_txn.assert_called_with(request=expected_request) + + commit_method = ds_api.commit + assert commit_method.call_count == 2 + mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL + commit_method.assert_called_with( + request={ "project_id": project, - "transaction_options": expected_options, + "mode": mode, + "mutations": [], + "transaction": id_, } + ) - def test_current(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 + 
ds_api.rollback.assert_not_called() - project = "PROJECT" - id_ = 678 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact1 = self._make_one(client) - xact2 = self._make_one(client) - self.assertIsNone(xact1.current()) - self.assertIsNone(xact2.current()) - with xact1: - self.assertIs(xact1.current(), xact1) - self.assertIs(xact2.current(), xact1) +def test_transaction_begin(): + project = "PROJECT" + id_ = 889 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) - with _NoCommitBatch(client): - self.assertIsNone(xact1.current()) - self.assertIsNone(xact2.current()) - - with xact2: - self.assertIs(xact1.current(), xact2) - self.assertIs(xact2.current(), xact2) - - with _NoCommitBatch(client): - self.assertIsNone(xact1.current()) - self.assertIsNone(xact2.current()) - - self.assertIs(xact1.current(), xact1) - self.assertIs(xact2.current(), xact1) - - self.assertIsNone(xact1.current()) - self.assertIsNone(xact2.current()) - - begin_txn = ds_api.begin_transaction - self.assertEqual(begin_txn.call_count, 2) - expected_request = self._make_begin_request(project) - begin_txn.assert_called_with(request=expected_request) - - commit_method = ds_api.commit - self.assertEqual(commit_method.call_count, 2) - mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": id_, - } - ) - - ds_api.rollback.assert_not_called() - - def test_begin(self): - project = "PROJECT" - id_ = 889 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) + xact.begin() - xact.begin() + assert xact.id == id_ - self.assertEqual(xact.id, id_) + expected_request = _make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) - expected_request = self._make_begin_request(project) - ds_api.begin_transaction.assert_called_once_with(request=expected_request) - def test_begin_w_readonly(self): - project = "PROJECT" - id_ = 889 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client, read_only=True) +def test_transaction_begin_w_readonly(): + project = "PROJECT" + id_ = 889 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client, read_only=True) - xact.begin() + xact.begin() - self.assertEqual(xact.id, id_) + assert xact.id == id_ - expected_request = self._make_begin_request(project, read_only=True) - ds_api.begin_transaction.assert_called_once_with(request=expected_request) + expected_request = _make_begin_request(project, read_only=True) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) - def test_begin_w_retry_w_timeout(self): - project = "PROJECT" - id_ = 889 - retry = mock.Mock() - timeout = 100000 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) +def test_transaction_begin_w_retry_w_timeout(): + project = "PROJECT" + id_ = 889 + retry = mock.Mock() + timeout = 100000 - xact.begin(retry=retry, timeout=timeout) + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) - self.assertEqual(xact.id, id_) + xact.begin(retry=retry, timeout=timeout) - expected_request = 
self._make_begin_request(project) - ds_api.begin_transaction.assert_called_once_with( - request=expected_request, retry=retry, timeout=timeout, - ) + assert xact.id == id_ - def test_begin_tombstoned(self): - project = "PROJECT" - id_ = 1094 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) + expected_request = _make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with( + request=expected_request, retry=retry, timeout=timeout, + ) - xact.begin() - self.assertEqual(xact.id, id_) +def test_transaction_begin_tombstoned(): + project = "PROJECT" + id_ = 1094 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) - expected_request = self._make_begin_request(project) - ds_api.begin_transaction.assert_called_once_with(request=expected_request) + xact.begin() - xact.rollback() + assert xact.id == id_ - client._datastore_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) - self.assertIsNone(xact.id) + expected_request = _make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) - with self.assertRaises(ValueError): - xact.begin() + xact.rollback() - def test_begin_w_begin_transaction_failure(self): - project = "PROJECT" - id_ = 712 - ds_api = _make_datastore_api(xact_id=id_) - ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[]) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) + client._datastore_api.rollback.assert_called_once_with( + request={"project_id": project, "transaction": id_} + ) + assert xact.id is None - with self.assertRaises(RuntimeError): - xact.begin() + with pytest.raises(ValueError): + xact.begin() - self.assertIsNone(xact.id) - expected_request = self._make_begin_request(project) - ds_api.begin_transaction.assert_called_once_with(request=expected_request) +def test_transaction_begin_w_begin_transaction_failure(): + project = "PROJECT" + id_ = 712 + ds_api = _make_datastore_api(xact_id=id_) + ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[]) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) - def test_rollback(self): - project = "PROJECT" - id_ = 239 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) + with pytest.raises(RuntimeError): xact.begin() - xact.rollback() + assert xact.id is None - self.assertIsNone(xact.id) - ds_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) + expected_request = _make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) - def test_rollback_w_retry_w_timeout(self): - project = "PROJECT" - id_ = 239 - retry = mock.Mock() - timeout = 100000 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) - xact.begin() +def test_transaction_rollback(): + project = "PROJECT" + id_ = 239 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) + xact.begin() - xact.rollback(retry=retry, timeout=timeout) + xact.rollback() - self.assertIsNone(xact.id) - ds_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_}, - retry=retry, - timeout=timeout, - ) + assert 
xact.id is None + ds_api.rollback.assert_called_once_with( + request={"project_id": project, "transaction": id_} + ) - def test_commit_no_partial_keys(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - project = "PROJECT" - id_ = 1002930 - mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL +def test_transaction_rollback_w_retry_w_timeout(): + project = "PROJECT" + id_ = 239 + retry = mock.Mock() + timeout = 100000 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) - xact.begin() - xact.commit() - - ds_api.commit.assert_called_once_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": id_, - } - ) - self.assertIsNone(xact.id) - - def test_commit_w_partial_keys_w_retry_w_timeout(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - kind = "KIND" - id1 = 123 - mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL - key = _make_key(kind, id1, project) - id2 = 234 - retry = mock.Mock() - timeout = 100000 - - ds_api = _make_datastore_api(key, xact_id=id2) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) - xact.begin() - entity = _Entity() + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) + xact.begin() - xact.put(entity) - xact.commit(retry=retry, timeout=timeout) - - ds_api.commit.assert_called_once_with( - request={ - "project_id": project, - "mode": mode, - "mutations": xact.mutations, - "transaction": id2, - }, - retry=retry, - timeout=timeout, - ) - self.assertIsNone(xact.id) - self.assertEqual(entity.key.path, [{"kind": kind, "id": id1}]) - - def test_context_manager_no_raise(self): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 - - project = "PROJECT" - id_ = 912830 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) + xact.rollback(retry=retry, timeout=timeout) + + assert xact.id is None + ds_api.rollback.assert_called_once_with( + request={"project_id": project, "transaction": id_}, + retry=retry, + timeout=timeout, + ) + + +def test_transaction_commit_no_partial_keys(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + id_ = 1002930 + mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL + + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) + xact.begin() + xact.commit() + + ds_api.commit.assert_called_once_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": id_, + } + ) + assert xact.id is None + + +def test_transaction_commit_w_partial_keys_w_retry_w_timeout(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + project = "PROJECT" + kind = "KIND" + id1 = 123 + mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL + key = _make_key(kind, id1, project) + id2 = 234 + retry = mock.Mock() + timeout = 100000 + + ds_api = _make_datastore_api(key, xact_id=id2) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) + xact.begin() + entity = _Entity() + + xact.put(entity) + xact.commit(retry=retry, timeout=timeout) + + ds_api.commit.assert_called_once_with( + request={ + "project_id": project, + "mode": mode, + "mutations": xact.mutations, + "transaction": id2, + }, + 
retry=retry, + timeout=timeout, + ) + assert xact.id is None + assert entity.key.path == [{"kind": kind, "id": id1}] + + +def test_transaction_context_manager_no_raise(): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + project = "PROJECT" + id_ = 912830 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) + + with xact: + # only set between begin / commit + assert xact.id == id_ + + assert xact.id is None + + expected_request = _make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) + + mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL + client._datastore_api.commit.assert_called_once_with( + request={ + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": id_, + }, + ) + + +def test_transaction_context_manager_w_raise(): + class Foo(Exception): + pass + + project = "PROJECT" + id_ = 614416 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client) + xact._mutation = object() + try: with xact: - self.assertEqual(xact.id, id_) # only set between begin / commit - - self.assertIsNone(xact.id) - - expected_request = self._make_begin_request(project) - ds_api.begin_transaction.assert_called_once_with(request=expected_request) - - mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL - client._datastore_api.commit.assert_called_once_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": id_, - }, - ) - - def test_context_manager_w_raise(self): - class Foo(Exception): - pass - - project = "PROJECT" - id_ = 614416 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - xact = self._make_one(client) - xact._mutation = object() - try: - with xact: - self.assertEqual(xact.id, id_) - raise Foo() - except Foo: - pass - - self.assertIsNone(xact.id) - - expected_request = self._make_begin_request(project) - ds_api.begin_transaction.assert_called_once_with(request=expected_request) - - client._datastore_api.commit.assert_not_called() - - client._datastore_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) - - def test_put_read_only(self): - project = "PROJECT" - id_ = 943243 - ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - entity = _Entity() - xact = self._make_one(client, read_only=True) - xact.begin() + assert xact.id == id_ + raise Foo() + except Foo: + pass + + assert xact.id is None - with self.assertRaises(RuntimeError): - xact.put(entity) + expected_request = _make_begin_request(project) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) + + client._datastore_api.commit.assert_not_called() + + client._datastore_api.rollback.assert_called_once_with( + request={"project_id": project, "transaction": id_} + ) + + +def test_transaction_put_read_only(): + project = "PROJECT" + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + entity = _Entity() + xact = _make_transaction(client, read_only=True) + xact.begin() + + with pytest.raises(RuntimeError): + xact.put(entity) def _make_key(kind, id_, project): @@ -422,6 +411,31 @@ def __exit__(self, *args): self._client._pop_batch() +def _make_options(read_only=False, previous_transaction=None): + from google.cloud.datastore_v1.types import TransactionOptions + + kw 
= {} + + if read_only: + kw["read_only"] = TransactionOptions.ReadOnly() + + return TransactionOptions(**kw) + + +def _make_transaction(client, **kw): + from google.cloud.datastore.transaction import Transaction + + return Transaction(client, **kw) + + +def _make_begin_request(project, read_only=False): + expected_options = _make_options(read_only=read_only) + return { + "project_id": project, + "transaction_options": expected_options, + } + + def _make_commit_response(*keys): from google.cloud.datastore_v1.types import datastore as datastore_pb2 From 24b43daca17d0cf00c82e027ed5249d2895d67e3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Oct 2021 17:59:19 -0400 Subject: [PATCH 378/611] chore(python): push cloud library docs to staging bucket for Cloud RAD (#245) * chore(python): push cloud library docs to staging bucket for Cloud RAD Source-Link: https://github.com/googleapis/synthtool/commit/7fd61f8efae782a7cfcecc599faf52f9737fe584 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 * fix coverage Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/.coveragerc | 1 + packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.kokoro/docs/common.cfg | 1 + packages/google-cloud-datastore/noxfile.py | 2 +- 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index ce32b3227567..cab8fd728a5f 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -20,6 +20,7 @@ branch = True fail_under = 100 show_missing = True omit = + google/__init__.py google/cloud/__init__.py google/cloud/datastore_v1/__init__.py google/cloud/datastore_admin_v1/__init__.py diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 7d98291cc35f..108063d4dee4 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b + digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 diff --git a/packages/google-cloud-datastore/.kokoro/docs/common.cfg b/packages/google-cloud-datastore/.kokoro/docs/common.cfg index cc2ce85aa8c8..6b7da47ba536 100644 --- a/packages/google-cloud-datastore/.kokoro/docs/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/docs/common.cfg @@ -30,6 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" + # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` value: "docs-staging-v2" } diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 4eeac549d24f..2510a58b77cc 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -112,7 +112,7 @@ def default(session): "py.test", "--quiet", f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google/cloud", + "--cov=google", "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", From 3aa13e231aa1756fb0df0b0cca744e1cf9ac7bd9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 8 
Nov 2021 11:00:14 -0500 Subject: [PATCH 379/611] feat: Added methods for creating and deleting composite indexes (#248) This PR fixes an issue with the `owlbot.py` file which prevented [google/cloud/datastore_v1](https://github.com/googleapis/python-datastore/commits/main/google/cloud/datastore_v1) from being updated. This PR also brings the client up to date with the generated client in googleapis-gen [here](https://github.com/googleapis/googleapis-gen/tree/master/google/datastore). chore: update owlbot.py to properly copy folders from googleapis-gen fix(deps): require google-api-core >= 1.28.0 docs: list oneofs in docstring feat: add context manager support in client fix: add 'dict' annotation type to 'request' fix: add async client to %name_%version/init.py feat: support self-signed JWT flow for service accounts feat: Added methods for creating and deleting composite indexes --- .../cloud/datastore_admin_v1/__init__.py | 13 +- .../datastore_admin_v1/gapic_metadata.json | 83 ++ .../datastore_admin_v1/services/__init__.py | 1 - .../services/datastore_admin/__init__.py | 2 - .../services/datastore_admin/async_client.py | 293 +++++-- .../services/datastore_admin/client.py | 354 +++++--- .../services/datastore_admin/pagers.py | 39 +- .../datastore_admin/transports/__init__.py | 3 - .../datastore_admin/transports/base.py | 131 ++- .../datastore_admin/transports/grpc.py | 241 ++++-- .../transports/grpc_asyncio.py | 249 ++++-- .../datastore_admin_v1/types/__init__.py | 37 +- .../types/datastore_admin.py | 157 ++-- .../cloud/datastore_admin_v1/types/index.py | 26 +- .../google/cloud/datastore_v1/__init__.py | 7 +- .../cloud/datastore_v1/gapic_metadata.json | 93 +++ .../cloud/datastore_v1/services/__init__.py | 1 - .../services/datastore/__init__.py | 2 - .../services/datastore/async_client.py | 176 ++-- .../datastore_v1/services/datastore/client.py | 249 +++--- .../services/datastore/transports/__init__.py | 3 - .../services/datastore/transports/base.py | 131 +-- .../services/datastore/transports/grpc.py | 143 ++-- .../datastore/transports/grpc_asyncio.py | 149 ++-- .../cloud/datastore_v1/types/__init__.py | 117 ++- .../cloud/datastore_v1/types/datastore.py | 163 ++-- .../google/cloud/datastore_v1/types/entity.py | 105 +-- .../google/cloud/datastore_v1/types/query.py | 124 ++- packages/google-cloud-datastore/owlbot.py | 87 +- .../fixup_datastore_admin_v1_keywords.py | 17 +- .../scripts/fixup_datastore_v1_keywords.py | 21 +- packages/google-cloud-datastore/setup.py | 2 +- .../testing/constraints-3.6.txt | 3 +- .../google-cloud-datastore/tests/__init__.py | 15 + .../tests/unit/__init__.py | 4 +- .../tests/unit/gapic/__init__.py | 15 + .../unit/gapic/datastore_admin_v1/__init__.py | 16 +- .../test_datastore_admin.py | 785 +++++++++++++----- .../tests/unit/gapic/datastore_v1/__init__.py | 16 +- .../unit/gapic/datastore_v1/test_datastore.py | 723 ++++++++++------ 40 files changed, 3119 insertions(+), 1677 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json create mode 100644 packages/google-cloud-datastore/tests/unit/gapic/__init__.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index 89cac8e11028..70a79c07616e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ 
b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,7 +15,11 @@ # from .services.datastore_admin import DatastoreAdminClient +from .services.datastore_admin import DatastoreAdminAsyncClient + from .types.datastore_admin import CommonMetadata +from .types.datastore_admin import CreateIndexRequest +from .types.datastore_admin import DeleteIndexRequest from .types.datastore_admin import EntityFilter from .types.datastore_admin import ExportEntitiesMetadata from .types.datastore_admin import ExportEntitiesRequest @@ -27,13 +30,16 @@ from .types.datastore_admin import IndexOperationMetadata from .types.datastore_admin import ListIndexesRequest from .types.datastore_admin import ListIndexesResponse -from .types.datastore_admin import OperationType from .types.datastore_admin import Progress +from .types.datastore_admin import OperationType from .types.index import Index - __all__ = ( + "DatastoreAdminAsyncClient", "CommonMetadata", + "CreateIndexRequest", + "DatastoreAdminClient", + "DeleteIndexRequest", "EntityFilter", "ExportEntitiesMetadata", "ExportEntitiesRequest", @@ -47,5 +53,4 @@ "ListIndexesResponse", "OperationType", "Progress", - "DatastoreAdminClient", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json new file mode 100644 index 000000000000..8df5d4747bfe --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json @@ -0,0 +1,83 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.datastore_admin_v1", + "protoPackage": "google.datastore.admin.v1", + "schema": "1.0", + "services": { + "DatastoreAdmin": { + "clients": { + "grpc": { + "libraryClient": "DatastoreAdminClient", + "rpcs": { + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportEntities": { + "methods": [ + "export_entities" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportEntities": { + "methods": [ + "import_entities" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DatastoreAdminAsyncClient", + "rpcs": { + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportEntities": { + "methods": [ + "export_entities" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportEntities": { + "methods": [ + "import_entities" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py index 42ffdf2bc43d..4de65971c238 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git 
a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index a004406b5070..951a69a98da9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DatastoreAdminClient from .async_client import DatastoreAdminAsyncClient diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 0cd7d99e2d85..e1d24d1684e9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,27 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport from .client import DatastoreAdminClient @@ -112,35 +111,61 @@ class DatastoreAdminAsyncClient: parse_common_billing_account_path = staticmethod( DatastoreAdminClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DatastoreAdminClient.common_folder_path) parse_common_folder_path = staticmethod( DatastoreAdminClient.parse_common_folder_path ) - common_organization_path = staticmethod( DatastoreAdminClient.common_organization_path ) parse_common_organization_path = staticmethod( DatastoreAdminClient.parse_common_organization_path ) - common_project_path = staticmethod(DatastoreAdminClient.common_project_path) 
parse_common_project_path = staticmethod( DatastoreAdminClient.parse_common_project_path ) - common_location_path = staticmethod(DatastoreAdminClient.common_location_path) parse_common_location_path = staticmethod( DatastoreAdminClient.parse_common_location_path ) - from_service_account_file = DatastoreAdminClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAdminAsyncClient: The constructed client. + """ + return DatastoreAdminClient.from_service_account_info.__func__(DatastoreAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAdminAsyncClient: The constructed client. + """ + return DatastoreAdminClient.from_service_account_file.__func__(DatastoreAdminAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property def transport(self) -> DatastoreAdminTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: DatastoreAdminTransport: The transport used by the client instance. @@ -154,12 +179,12 @@ def transport(self) -> DatastoreAdminTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the datastore admin client. + """Instantiates the datastore admin client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -191,7 +216,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DatastoreAdminClient( credentials=credentials, transport=transport, @@ -201,13 +225,13 @@ def __init__( async def export_entities( self, - request: datastore_admin.ExportEntitiesRequest = None, + request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -223,23 +247,25 @@ async def export_entities( Google Cloud Storage. Args: - request (:class:`~.datastore_admin.ExportEntitiesRequest`): + request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. project_id (:class:`str`): Required. 
Project ID against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[~.datastore_admin.ExportEntitiesRequest.LabelsEntry]`): + labels (:class:`Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entity_filter (:class:`~.datastore_admin.EntityFilter`): + entity_filter (:class:`google.cloud.datastore_admin_v1.types.EntityFilter`): Description of what data from the project is included in the export. + This corresponds to the ``entity_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -268,10 +294,10 @@ async def export_entities( By nesting the data files deeper, the same Cloud Storage bucket can be used in multiple ExportEntities operations without conflict. + This corresponds to the ``output_url_prefix`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -279,13 +305,11 @@ async def export_entities( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.datastore_admin.ExportEntitiesResponse``: The - response for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + The result type for the operation will be :class:`google.cloud.datastore_admin_v1.types.ExportEntitiesResponse` The response for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. """ # Create or coerce a protobuf request object. @@ -304,7 +328,6 @@ async def export_entities( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if entity_filter is not None: @@ -339,13 +362,13 @@ async def export_entities( async def import_entities( self, - request: datastore_admin.ImportEntitiesRequest = None, + request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -358,16 +381,17 @@ async def import_entities( imported to Cloud Datastore. Args: - request (:class:`~.datastore_admin.ImportEntitiesRequest`): + request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. project_id (:class:`str`): Required. Project ID against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- labels (:class:`Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]`): + labels (:class:`Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -388,20 +412,21 @@ async def import_entities( For more information, see [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. + This corresponds to the ``input_url`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entity_filter (:class:`~.datastore_admin.EntityFilter`): + entity_filter (:class:`google.cloud.datastore_admin_v1.types.EntityFilter`): Optionally specify which kinds/namespaces are to be imported. If provided, the list must be a subset of the EntityFilter used in creating the export, otherwise a FAILED_PRECONDITION error will be returned. If no filter is specified then all entities from the export are imported. + This corresponds to the ``entity_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -409,24 +434,22 @@ async def import_entities( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -443,7 +466,6 @@ async def import_entities( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if input_url is not None: @@ -469,28 +491,162 @@ async def import_entities( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=datastore_admin.ImportEntitiesMetadata, ) # Done; return the response. return response + async def create_index( + self, + request: Union[datastore_admin.CreateIndexRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates the specified index. A newly created index's initial + state is ``CREATING``. On completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the state will be ``READY``. 
If the index already exists, the + call will return an ``ALREADY_EXISTS`` status. + + During index creation, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, removing the index with + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], + then re-creating the index with [create] + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + + Indexes with a single property cannot be created. + + Args: + request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.datastore_admin_v1.types.Index` + Datastore composite index definition. + + """ + # Create or coerce a protobuf request object. + request = datastore_admin.CreateIndexRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_index, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + index.Index, + metadata_type=datastore_admin.IndexOperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_index( + self, + request: Union[datastore_admin.DeleteIndexRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an existing index. An index can only be deleted if it is + in a ``READY`` or ``ERROR`` state. On successful execution of + the request, the index will be in a ``DELETING`` + [state][google.datastore.admin.v1.Index.State]. And on + completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the index will be removed. + + During index deletion, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, followed by calling + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] + again. + + Args: + request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.datastore_admin_v1.types.Index` + Datastore composite index definition. + + """ + # Create or coerce a protobuf request object. + request = datastore_admin.DeleteIndexRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_index, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + index.Index, + metadata_type=datastore_admin.IndexOperationMetadata, + ) + + # Done; return the response. + return response + async def get_index( self, - request: datastore_admin.GetIndexRequest = None, + request: Union[datastore_admin.GetIndexRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> index.Index: r"""Gets an index. Args: - request (:class:`~.datastore_admin.GetIndexRequest`): + request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -498,11 +654,10 @@ async def get_index( sent along with the request as metadata. Returns: - ~.index.Index: - A minimal index definition. + google.cloud.datastore_admin_v1.types.Index: + Datastore composite index definition. """ # Create or coerce a protobuf request object. - request = datastore_admin.GetIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -514,8 +669,10 @@ async def get_index( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -529,9 +686,9 @@ async def get_index( async def list_indexes( self, - request: datastore_admin.ListIndexesRequest = None, + request: Union[datastore_admin.ListIndexesRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListIndexesAsyncPager: @@ -541,10 +698,9 @@ async def list_indexes( results. Args: - request (:class:`~.datastore_admin.ListIndexesRequest`): + request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -552,16 +708,15 @@ async def list_indexes( sent along with the request as metadata. 
Returns: - ~.pagers.ListIndexesAsyncPager: + google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesAsyncPager: The response for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - request = datastore_admin.ListIndexesRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -573,8 +728,10 @@ async def list_indexes( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -592,6 +749,12 @@ async def list_indexes( # Done; return the response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index a97567590ce6..b8ca70c4eadd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,31 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreAdminGrpcTransport from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport @@ -59,7 +58,7 @@ class DatastoreAdminClientMeta(type): _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[DatastoreAdminTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -137,7 +136,8 @@ class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -169,10 +169,27 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -181,7 +198,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + DatastoreAdminClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -191,16 +208,17 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DatastoreAdminTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DatastoreAdminTransport: The transport used by the client instance. + DatastoreAdminTransport: The transport used by the client + instance. """ return self._transport @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -213,7 +231,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -224,7 +242,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -235,7 +253,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -246,7 +264,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -260,12 +278,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DatastoreAdminTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the datastore admin client. + """Instantiates the datastore admin client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -273,10 +291,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DatastoreAdminTransport]): The + transport (Union[str, DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -312,21 +330,18 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -338,12 +353,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -358,8 +375,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -369,20 +386,21 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def export_entities( self, - request: datastore_admin.ExportEntitiesRequest = None, + request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -398,27 +416,29 @@ def export_entities( Google Cloud Storage. Args: - request (:class:`~.datastore_admin.ExportEntitiesRequest`): + request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - project_id (:class:`str`): + project_id (str): Required. Project ID against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[~.datastore_admin.ExportEntitiesRequest.LabelsEntry]`): + labels (Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- entity_filter (:class:`~.datastore_admin.EntityFilter`): + entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is included in the export. + This corresponds to the ``entity_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - output_url_prefix (:class:`str`): + output_url_prefix (str): Required. Location for the export metadata and data files. @@ -443,10 +463,10 @@ def export_entities( By nesting the data files deeper, the same Cloud Storage bucket can be used in multiple ExportEntities operations without conflict. + This corresponds to the ``output_url_prefix`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -454,13 +474,11 @@ def export_entities( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.datastore_admin.ExportEntitiesResponse``: The - response for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + The result type for the operation will be :class:`google.cloud.datastore_admin_v1.types.ExportEntitiesResponse` The response for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. """ # Create or coerce a protobuf request object. @@ -481,20 +499,17 @@ def export_entities( # there are no flattened fields. if not isinstance(request, datastore_admin.ExportEntitiesRequest): request = datastore_admin.ExportEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id + if labels is not None: + request.labels = labels if entity_filter is not None: request.entity_filter = entity_filter if output_url_prefix is not None: request.output_url_prefix = output_url_prefix - if labels: - request.labels.update(labels) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_entities] @@ -515,13 +530,13 @@ def export_entities( def import_entities( self, - request: datastore_admin.ImportEntitiesRequest = None, + request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -534,21 +549,22 @@ def import_entities( imported to Cloud Datastore. Args: - request (:class:`~.datastore_admin.ImportEntitiesRequest`): + request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - project_id (:class:`str`): + project_id (str): Required. Project ID against which to make the request. 
+ This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]`): + labels (Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - input_url (:class:`str`): + input_url (str): Required. The full resource URL of the external storage location. Currently, only Google Cloud Storage is supported. So input_url should be of the form: @@ -564,20 +580,21 @@ def import_entities( For more information, see [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. + This corresponds to the ``input_url`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entity_filter (:class:`~.datastore_admin.EntityFilter`): + entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Optionally specify which kinds/namespaces are to be imported. If provided, the list must be a subset of the EntityFilter used in creating the export, otherwise a FAILED_PRECONDITION error will be returned. If no filter is specified then all entities from the export are imported. + This corresponds to the ``entity_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -585,24 +602,22 @@ def import_entities( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -621,20 +636,17 @@ def import_entities( # there are no flattened fields. if not isinstance(request, datastore_admin.ImportEntitiesRequest): request = datastore_admin.ImportEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id + if labels is not None: + request.labels = labels if input_url is not None: request.input_url = input_url if entity_filter is not None: request.entity_filter = entity_filter - if labels: - request.labels.update(labels) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.import_entities] @@ -646,28 +658,164 @@ def import_entities( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=datastore_admin.ImportEntitiesMetadata, ) # Done; return the response. return response + def create_index( + self, + request: Union[datastore_admin.CreateIndexRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates the specified index. A newly created index's initial + state is ``CREATING``. On completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the state will be ``READY``. If the index already exists, the + call will return an ``ALREADY_EXISTS`` status. + + During index creation, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, removing the index with + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], + then re-creating the index with [create] + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + + Indexes with a single property cannot be created. + + Args: + request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.datastore_admin_v1.types.Index` + Datastore composite index definition. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datastore_admin.CreateIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore_admin.CreateIndexRequest): + request = datastore_admin.CreateIndexRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_index] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + index.Index, + metadata_type=datastore_admin.IndexOperationMetadata, + ) + + # Done; return the response. + return response + + def delete_index( + self, + request: Union[datastore_admin.DeleteIndexRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes an existing index. An index can only be deleted if it is + in a ``READY`` or ``ERROR`` state. On successful execution of + the request, the index will be in a ``DELETING`` + [state][google.datastore.admin.v1.Index.State]. 
And on + completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the index will be removed. + + During index deletion, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, followed by calling + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] + again. + + Args: + request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.datastore_admin_v1.types.Index` + Datastore composite index definition. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datastore_admin.DeleteIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore_admin.DeleteIndexRequest): + request = datastore_admin.DeleteIndexRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_index] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + index.Index, + metadata_type=datastore_admin.IndexOperationMetadata, + ) + + # Done; return the response. + return response + def get_index( self, - request: datastore_admin.GetIndexRequest = None, + request: Union[datastore_admin.GetIndexRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> index.Index: r"""Gets an index. Args: - request (:class:`~.datastore_admin.GetIndexRequest`): + request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -675,11 +823,10 @@ def get_index( sent along with the request as metadata. Returns: - ~.index.Index: - A minimal index definition. + google.cloud.datastore_admin_v1.types.Index: + Datastore composite index definition. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a datastore_admin.GetIndexRequest. 
# There's no risk of modifying the input as we've already verified @@ -699,9 +846,9 @@ def get_index( def list_indexes( self, - request: datastore_admin.ListIndexesRequest = None, + request: Union[datastore_admin.ListIndexesRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListIndexesPager: @@ -711,10 +858,9 @@ def list_indexes( results. Args: - request (:class:`~.datastore_admin.ListIndexesRequest`): + request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -722,16 +868,15 @@ def list_indexes( sent along with the request as metadata. Returns: - ~.pagers.ListIndexesPager: + google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesPager: The response for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a datastore_admin.ListIndexesRequest. # There's no risk of modifying the input as we've already verified @@ -755,6 +900,19 @@ def list_indexes( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index 7c176fce7a25..a2f148588ade 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,8 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Sequence, + Tuple, + Optional, + Iterator, +) from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index @@ -25,7 +32,7 @@ class ListIndexesPager: """A pager for iterating through ``list_indexes`` requests. 
This class thinly wraps an initial - :class:`~.datastore_admin.ListIndexesResponse` object, and + :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` object, and provides an ``__iter__`` method to iterate through its ``indexes`` field. @@ -34,7 +41,7 @@ class ListIndexesPager: through the ``indexes`` field on the corresponding responses. - All the usual :class:`~.datastore_admin.ListIndexesResponse` + All the usual :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -52,9 +59,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datastore_admin.ListIndexesRequest`): + request (google.cloud.datastore_admin_v1.types.ListIndexesRequest): The initial request object. - response (:class:`~.datastore_admin.ListIndexesResponse`): + response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -68,14 +75,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[datastore_admin.ListIndexesResponse]: + def pages(self) -> Iterator[datastore_admin.ListIndexesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[index.Index]: + def __iter__(self) -> Iterator[index.Index]: for page in self.pages: yield from page.indexes @@ -87,7 +94,7 @@ class ListIndexesAsyncPager: """A pager for iterating through ``list_indexes`` requests. This class thinly wraps an initial - :class:`~.datastore_admin.ListIndexesResponse` object, and + :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` object, and provides an ``__aiter__`` method to iterate through its ``indexes`` field. @@ -96,7 +103,7 @@ class ListIndexesAsyncPager: through the ``indexes`` field on the corresponding responses. - All the usual :class:`~.datastore_admin.ListIndexesResponse` + All the usual :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -109,14 +116,14 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.datastore_admin.ListIndexesRequest`): + request (google.cloud.datastore_admin_v1.types.ListIndexesRequest): The initial request object. - response (:class:`~.datastore_admin.ListIndexesResponse`): + response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
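To make the pager semantics above concrete, a small sketch (synchronous client, ambient credentials, placeholder project id assumed): ``__iter__`` yields individual ``Index`` messages and fetches further pages lazily, while the ``pages`` property yields the raw ``ListIndexesResponse`` objects::

    from google.cloud import datastore_admin_v1

    client = datastore_admin_v1.DatastoreAdminClient()

    # Item-level iteration: next_page_token handling happens inside the pager.
    for idx in client.list_indexes(request={"project_id": "my-project"}):
        print(idx.index_id)

    # Page-level iteration over a fresh pager: one response per loop turn.
    for page in client.list_indexes(request={"project_id": "my-project"}).pages:
        print(len(page.indexes), page.next_page_token)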
@@ -130,14 +137,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[datastore_admin.ListIndexesResponse]: + async def pages(self) -> AsyncIterator[datastore_admin.ListIndexesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[index.Index]: + def __aiter__(self) -> AsyncIterator[index.Index]: async def async_generator(): async for page in self.pages: for response in page.indexes: diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index 41b72bc31b4f..376bbfa1ecf5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type @@ -28,7 +26,6 @@ _transport_registry["grpc"] = DatastoreAdminGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport - __all__ = ( "DatastoreAdminTransport", "DatastoreAdminGrpcTransport", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index d2a8b621863b..8fc750285c33 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
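The ``_transport_registry`` mappings above are what make the string labels usable at construction time; a minimal sketch, assuming credentials are resolved from the environment::

    from google.cloud import datastore_admin_v1

    # "grpc" and "grpc_asyncio" are the labels registered above; passing the
    # label selects the matching transport class via get_transport_class().
    sync_client = datastore_admin_v1.DatastoreAdminClient(transport="grpc")
    async_client = datastore_admin_v1.DatastoreAdminAsyncClient(transport="grpc_asyncio")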
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -49,21 +48,25 @@ class DatastoreAdminTransport(abc.ABC): "https://www.googleapis.com/auth/datastore", ) + DEFAULT_HOST: str = "datastore.googleapis.com" + def __init__( self, *, - host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -72,43 +75,55 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" self._host = host + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -118,6 +133,12 @@ def _prep_wrapped_messages(self, client_info): self.import_entities: gapic_v1.method.wrap_method( self.import_entities, default_timeout=60.0, client_info=client_info, ), + self.create_index: gapic_v1.method.wrap_method( + self.create_index, default_timeout=60.0, client_info=client_info, + ), + self.delete_index: gapic_v1.method.wrap_method( + self.delete_index, default_timeout=60.0, client_info=client_info, + ), self.get_index: gapic_v1.method.wrap_method( self.get_index, default_retry=retries.Retry( @@ -125,8 +146,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -138,54 +161,82 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() @property def export_entities( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore_admin.ExportEntitiesRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def import_entities( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore_admin.ImportEntitiesRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_index( + self, + ) -> Callable[ + [datastore_admin.CreateIndexRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_index( + self, + ) -> Callable[ + [datastore_admin.DeleteIndexRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def get_index( self, - ) -> typing.Callable[ - [datastore_admin.GetIndexRequest], - typing.Union[index.Index, typing.Awaitable[index.Index]], + ) -> Callable[ + [datastore_admin.GetIndexRequest], Union[index.Index, Awaitable[index.Index]] ]: raise NotImplementedError() @property def list_indexes( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore_admin.ListIndexesRequest], - typing.Union[ + Union[ datastore_admin.ListIndexesResponse, - typing.Awaitable[datastore_admin.ListIndexesResponse], + Awaitable[datastore_admin.ListIndexesResponse], ], ]: raise NotImplementedError() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 498a6a53abdf..07db8479c032 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO @@ -110,20 +107,23 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -140,13 +140,17 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -154,6 +158,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -161,88 +167,77 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - self._stubs = {} # type: Dict[str, Callable] + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. 
+ # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + @classmethod def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -250,7 +245,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -273,13 +268,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -297,18 +294,16 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsClient( - self.grpc_channel - ) + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) # Return the client from cache. - return self.__dict__["operations_client"] + return self._operations_client @property def export_entities( self, - ) -> Callable[[datastore_admin.ExportEntitiesRequest], operations.Operation]: + ) -> Callable[[datastore_admin.ExportEntitiesRequest], operations_pb2.Operation]: r"""Return a callable for the export entities method over gRPC. Exports a copy of all or a subset of entities from @@ -336,14 +331,14 @@ def export_entities( self._stubs["export_entities"] = self.grpc_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=datastore_admin.ExportEntitiesRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_entities"] @property def import_entities( self, - ) -> Callable[[datastore_admin.ImportEntitiesRequest], operations.Operation]: + ) -> Callable[[datastore_admin.ImportEntitiesRequest], operations_pb2.Operation]: r"""Return a callable for the import entities method over gRPC. Imports entities into Google Cloud Datastore. 
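Because the constructor above ignores credentials when a ``channel`` is passed, a pre-built channel can point the transport at another endpoint entirely; a sketch against a hypothetical local emulator address::

    import grpc

    from google.cloud import datastore_admin_v1
    from google.cloud.datastore_admin_v1.services.datastore_admin.transports.grpc import (
        DatastoreAdminGrpcTransport,
    )

    # "localhost:8081" is a placeholder; any reachable gRPC endpoint works.
    channel = grpc.insecure_channel("localhost:8081")
    transport = DatastoreAdminGrpcTransport(channel=channel)
    client = datastore_admin_v1.DatastoreAdminClient(transport=transport)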
@@ -368,10 +363,89 @@ def import_entities( self._stubs["import_entities"] = self.grpc_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", request_serializer=datastore_admin.ImportEntitiesRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["import_entities"] + @property + def create_index( + self, + ) -> Callable[[datastore_admin.CreateIndexRequest], operations_pb2.Operation]: + r"""Return a callable for the create index method over gRPC. + + Creates the specified index. A newly created index's initial + state is ``CREATING``. On completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the state will be ``READY``. If the index already exists, the + call will return an ``ALREADY_EXISTS`` status. + + During index creation, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, removing the index with + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], + then re-creating the index with [create] + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + + Indexes with a single property cannot be created. + + Returns: + Callable[[~.CreateIndexRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_index" not in self._stubs: + self._stubs["create_index"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/CreateIndex", + request_serializer=datastore_admin.CreateIndexRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_index"] + + @property + def delete_index( + self, + ) -> Callable[[datastore_admin.DeleteIndexRequest], operations_pb2.Operation]: + r"""Return a callable for the delete index method over gRPC. + + Deletes an existing index. An index can only be deleted if it is + in a ``READY`` or ``ERROR`` state. On successful execution of + the request, the index will be in a ``DELETING`` + [state][google.datastore.admin.v1.Index.State]. And on + completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the index will be removed. + + During index deletion, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, followed by calling + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] + again. + + Returns: + Callable[[~.DeleteIndexRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_index" not in self._stubs: + self._stubs["delete_index"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex", + request_serializer=datastore_admin.DeleteIndexRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_index"] + @property def get_index(self) -> Callable[[datastore_admin.GetIndexRequest], index.Index]: r"""Return a callable for the get index method over gRPC. @@ -427,5 +501,8 @@ def list_indexes( ) return self._stubs["list_indexes"] + def close(self): + self.grpc_channel.close() + __all__ = ("DatastoreAdminGrpcTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index f731d4c0ca24..8a1f1a547d2b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,15 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -30,8 +27,7 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreAdminGrpcTransport @@ -113,7 +109,7 @@ class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -121,7 +117,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -140,13 +136,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -154,20 +152,23 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -185,20 +186,26 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -206,82 +213,70 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. + # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) - self._stubs = {} + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: @@ -301,19 +296,19 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. - return self.__dict__["operations_client"] + return self._operations_client @property def export_entities( self, ) -> Callable[ - [datastore_admin.ExportEntitiesRequest], Awaitable[operations.Operation] + [datastore_admin.ExportEntitiesRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the export entities method over gRPC. @@ -342,7 +337,7 @@ def export_entities( self._stubs["export_entities"] = self.grpc_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=datastore_admin.ExportEntitiesRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_entities"] @@ -350,7 +345,7 @@ def export_entities( def import_entities( self, ) -> Callable[ - [datastore_admin.ImportEntitiesRequest], Awaitable[operations.Operation] + [datastore_admin.ImportEntitiesRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the import entities method over gRPC. @@ -376,10 +371,93 @@ def import_entities( self._stubs["import_entities"] = self.grpc_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", request_serializer=datastore_admin.ImportEntitiesRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["import_entities"] + @property + def create_index( + self, + ) -> Callable[ + [datastore_admin.CreateIndexRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create index method over gRPC. + + Creates the specified index. A newly created index's initial + state is ``CREATING``. On completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the state will be ``READY``. If the index already exists, the + call will return an ``ALREADY_EXISTS`` status. + + During index creation, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, removing the index with + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], + then re-creating the index with [create] + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + + Indexes with a single property cannot be created. + + Returns: + Callable[[~.CreateIndexRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_index" not in self._stubs: + self._stubs["create_index"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/CreateIndex", + request_serializer=datastore_admin.CreateIndexRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_index"] + + @property + def delete_index( + self, + ) -> Callable[ + [datastore_admin.DeleteIndexRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete index method over gRPC. + + Deletes an existing index. An index can only be deleted if it is + in a ``READY`` or ``ERROR`` state. On successful execution of + the request, the index will be in a ``DELETING`` + [state][google.datastore.admin.v1.Index.State]. And on + completion of the returned + [google.longrunning.Operation][google.longrunning.Operation], + the index will be removed. + + During index deletion, the process could result in an error, in + which case the index will move to the ``ERROR`` state. The + process can be recovered by fixing the data that caused the + error, followed by calling + [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] + again. + + Returns: + Callable[[~.DeleteIndexRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_index" not in self._stubs: + self._stubs["delete_index"] = self.grpc_channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex", + request_serializer=datastore_admin.DeleteIndexRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_index"] + @property def get_index( self, @@ -438,5 +516,8 @@ def list_indexes( ) return self._stubs["list_indexes"] + def close(self): + return self.grpc_channel.close() + __all__ = ("DatastoreAdminGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py index b3bf63d88309..ac4ff9056251 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,36 +13,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -from .index import Index from .datastore_admin import ( CommonMetadata, - Progress, + CreateIndexRequest, + DeleteIndexRequest, + EntityFilter, + ExportEntitiesMetadata, ExportEntitiesRequest, - ImportEntitiesRequest, ExportEntitiesResponse, - ExportEntitiesMetadata, - ImportEntitiesMetadata, - EntityFilter, GetIndexRequest, + ImportEntitiesMetadata, + ImportEntitiesRequest, + IndexOperationMetadata, ListIndexesRequest, ListIndexesResponse, - IndexOperationMetadata, + Progress, + OperationType, ) - +from .index import Index __all__ = ( - "Index", "CommonMetadata", - "Progress", + "CreateIndexRequest", + "DeleteIndexRequest", + "EntityFilter", + "ExportEntitiesMetadata", "ExportEntitiesRequest", - "ImportEntitiesRequest", "ExportEntitiesResponse", - "ExportEntitiesMetadata", - "ImportEntitiesMetadata", - "EntityFilter", "GetIndexRequest", + "ImportEntitiesMetadata", + "ImportEntitiesRequest", + "IndexOperationMetadata", "ListIndexesRequest", "ListIndexesResponse", - "IndexOperationMetadata", + "Progress", + "OperationType", + "Index", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 1fd3c8d58ff3..0f4546fdb5b1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.cloud.datastore_admin_v1.types import index -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.cloud.datastore_admin_v1.types import index as gda_index +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -34,6 +31,8 @@ "ExportEntitiesMetadata", "ImportEntitiesMetadata", "EntityFilter", + "CreateIndexRequest", + "DeleteIndexRequest", "GetIndexRequest", "ListIndexesRequest", "ListIndexesResponse", @@ -55,19 +54,19 @@ class CommonMetadata(proto.Message): r"""Metadata common to all Datastore Admin operations. Attributes: - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): The time that work began on the operation. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): The time the operation ended, either successfully or otherwise. - operation_type (~.datastore_admin.OperationType): + operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. - labels (Sequence[~.datastore_admin.CommonMetadata.LabelsEntry]): + labels (Sequence[google.cloud.datastore_admin_v1.types.CommonMetadata.LabelsEntry]): The client-assigned labels which were provided when the operation was created. May also include additional labels. - state (~.datastore_admin.CommonMetadata.State): + state (google.cloud.datastore_admin_v1.types.CommonMetadata.State): The current state of the Operation. 
""" @@ -82,14 +81,10 @@ class State(proto.Enum): FAILED = 6 CANCELLED = 7 - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) operation_type = proto.Field(proto.ENUM, number=3, enum="OperationType",) - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) - + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) state = proto.Field(proto.ENUM, number=5, enum=State,) @@ -106,9 +101,8 @@ class Progress(proto.Message): unavailable. """ - work_completed = proto.Field(proto.INT64, number=1) - - work_estimated = proto.Field(proto.INT64, number=2) + work_completed = proto.Field(proto.INT64, number=1,) + work_estimated = proto.Field(proto.INT64, number=2,) class ExportEntitiesRequest(proto.Message): @@ -119,9 +113,9 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Sequence[~.datastore_admin.ExportEntitiesRequest.LabelsEntry]): + labels (Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]): Client-assigned labels. - entity_filter (~.datastore_admin.EntityFilter): + entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is included in the export. output_url_prefix (str): @@ -149,13 +143,10 @@ class ExportEntitiesRequest(proto.Message): without conflict. """ - project_id = proto.Field(proto.STRING, number=1) - - labels = proto.MapField(proto.STRING, proto.STRING, number=2) - + project_id = proto.Field(proto.STRING, number=1,) + labels = proto.MapField(proto.STRING, proto.STRING, number=2,) entity_filter = proto.Field(proto.MESSAGE, number=3, message="EntityFilter",) - - output_url_prefix = proto.Field(proto.STRING, number=4) + output_url_prefix = proto.Field(proto.STRING, number=4,) class ImportEntitiesRequest(proto.Message): @@ -166,7 +157,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Sequence[~.datastore_admin.ImportEntitiesRequest.LabelsEntry]): + labels (Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage @@ -184,7 +175,7 @@ class ImportEntitiesRequest(proto.Message): For more information, see [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - entity_filter (~.datastore_admin.EntityFilter): + entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Optionally specify which kinds/namespaces are to be imported. If provided, the list must be a subset of the EntityFilter used in creating the export, otherwise a @@ -192,12 +183,9 @@ class ImportEntitiesRequest(proto.Message): specified then all entities from the export are imported. 
""" - project_id = proto.Field(proto.STRING, number=1) - - labels = proto.MapField(proto.STRING, proto.STRING, number=2) - - input_url = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + labels = proto.MapField(proto.STRING, proto.STRING, number=2,) + input_url = proto.Field(proto.STRING, number=3,) entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) @@ -214,22 +202,22 @@ class ExportEntitiesResponse(proto.Message): Only present if the operation completed successfully. """ - output_url = proto.Field(proto.STRING, number=1) + output_url = proto.Field(proto.STRING, number=1,) class ExportEntitiesMetadata(proto.Message): r"""Metadata for ExportEntities operations. Attributes: - common (~.datastore_admin.CommonMetadata): + common (google.cloud.datastore_admin_v1.types.CommonMetadata): Metadata common to all Datastore Admin operations. - progress_entities (~.datastore_admin.Progress): + progress_entities (google.cloud.datastore_admin_v1.types.Progress): An estimate of the number of entities processed. - progress_bytes (~.datastore_admin.Progress): + progress_bytes (google.cloud.datastore_admin_v1.types.Progress): An estimate of the number of bytes processed. - entity_filter (~.datastore_admin.EntityFilter): + entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of which entities are being exported. output_url_prefix (str): @@ -241,29 +229,25 @@ class ExportEntitiesMetadata(proto.Message): """ common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - - output_url_prefix = proto.Field(proto.STRING, number=5) + output_url_prefix = proto.Field(proto.STRING, number=5,) class ImportEntitiesMetadata(proto.Message): r"""Metadata for ImportEntities operations. Attributes: - common (~.datastore_admin.CommonMetadata): + common (google.cloud.datastore_admin_v1.types.CommonMetadata): Metadata common to all Datastore Admin operations. - progress_entities (~.datastore_admin.Progress): + progress_entities (google.cloud.datastore_admin_v1.types.Progress): An estimate of the number of entities processed. - progress_bytes (~.datastore_admin.Progress): + progress_bytes (google.cloud.datastore_admin_v1.types.Progress): An estimate of the number of bytes processed. - entity_filter (~.datastore_admin.EntityFilter): + entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of which entities are being imported. input_url (str): @@ -274,14 +258,10 @@ class ImportEntitiesMetadata(proto.Message): """ common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - - input_url = proto.Field(proto.STRING, number=5) + input_url = proto.Field(proto.STRING, number=5,) class EntityFilter(proto.Message): @@ -316,9 +296,41 @@ class EntityFilter(proto.Message): Each namespace in this list must be unique. 
""" - kinds = proto.RepeatedField(proto.STRING, number=1) + kinds = proto.RepeatedField(proto.STRING, number=1,) + namespace_ids = proto.RepeatedField(proto.STRING, number=2,) - namespace_ids = proto.RepeatedField(proto.STRING, number=2) + +class CreateIndexRequest(proto.Message): + r"""The request for + [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. + + Attributes: + project_id (str): + Project ID against which to make the request. + index (google.cloud.datastore_admin_v1.types.Index): + The index to create. The name and state + fields are output only and will be ignored. + Single property indexes cannot be created or + deleted. + """ + + project_id = proto.Field(proto.STRING, number=1,) + index = proto.Field(proto.MESSAGE, number=3, message=gda_index.Index,) + + +class DeleteIndexRequest(proto.Message): + r"""The request for + [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. + + Attributes: + project_id (str): + Project ID against which to make the request. + index_id (str): + The resource ID of the index to delete. + """ + + project_id = proto.Field(proto.STRING, number=1,) + index_id = proto.Field(proto.STRING, number=3,) class GetIndexRequest(proto.Message): @@ -332,9 +344,8 @@ class GetIndexRequest(proto.Message): The resource ID of the index to get. """ - project_id = proto.Field(proto.STRING, number=1) - - index_id = proto.Field(proto.STRING, number=3) + project_id = proto.Field(proto.STRING, number=1,) + index_id = proto.Field(proto.STRING, number=3,) class ListIndexesRequest(proto.Message): @@ -354,13 +365,10 @@ class ListIndexesRequest(proto.Message): request, if any. """ - project_id = proto.Field(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) - - page_token = proto.Field(proto.STRING, number=5) + project_id = proto.Field(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) + page_token = proto.Field(proto.STRING, number=5,) class ListIndexesResponse(proto.Message): @@ -368,7 +376,7 @@ class ListIndexesResponse(proto.Message): [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. Attributes: - indexes (Sequence[~.index.Index]): + indexes (Sequence[google.cloud.datastore_admin_v1.types.Index]): The indexes. next_page_token (str): The standard List next-page token. @@ -378,19 +386,18 @@ class ListIndexesResponse(proto.Message): def raw_page(self): return self - indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=index.Index,) - - next_page_token = proto.Field(proto.STRING, number=2) + indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=gda_index.Index,) + next_page_token = proto.Field(proto.STRING, number=2,) class IndexOperationMetadata(proto.Message): r"""Metadata for Index operations. Attributes: - common (~.datastore_admin.CommonMetadata): + common (google.cloud.datastore_admin_v1.types.CommonMetadata): Metadata common to all Datastore Admin operations. - progress_entities (~.datastore_admin.Progress): + progress_entities (google.cloud.datastore_admin_v1.types.Progress): An estimate of the number of entities processed. 
index_id (str): @@ -399,10 +406,8 @@ class IndexOperationMetadata(proto.Message): """ common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - - index_id = proto.Field(proto.STRING, number=3) + index_id = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index e11a27a584cd..b372cccf6c73 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -22,7 +20,7 @@ class Index(proto.Message): - r"""A minimal index definition. + r"""Datastore composite index definition. Attributes: project_id (str): @@ -32,13 +30,13 @@ class Index(proto.Message): kind (str): Required. The entity kind to which this index applies. - ancestor (~.index.Index.AncestorMode): + ancestor (google.cloud.datastore_admin_v1.types.Index.AncestorMode): Required. The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. - properties (Sequence[~.index.Index.IndexedProperty]): + properties (Sequence[google.cloud.datastore_admin_v1.types.Index.IndexedProperty]): Required. An ordered sequence of property names and their index attributes. - state (~.index.Index.State): + state (google.cloud.datastore_admin_v1.types.Index.State): Output only. The state of the index. """ @@ -70,25 +68,19 @@ class IndexedProperty(proto.Message): Attributes: name (str): Required. The property name to index. - direction (~.index.Index.Direction): + direction (google.cloud.datastore_admin_v1.types.Index.Direction): Required. The indexed property's direction. Must not be DIRECTION_UNSPECIFIED. 
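Putting the pieces together, a CreateIndexRequest carries an Index whose ``name`` and ``state`` fields are output-only, with at least two indexed properties (single-property indexes are rejected, per the RPC docs above). A sketch with a placeholder kind and properties; the ``NONE`` ancestor mode is an assumption about the enum's non-UNSPECIFIED values:

    from google.cloud.datastore_admin_v1 import types

    index = types.Index(
        kind="Task",
        ancestor=types.Index.AncestorMode.NONE,
        properties=[
            types.Index.IndexedProperty(
                name="done", direction=types.Index.Direction.ASCENDING
            ),
            types.Index.IndexedProperty(
                name="priority", direction=types.Index.Direction.DESCENDING
            ),
        ],
    )
    request = types.CreateIndexRequest(project_id="my-project", index=index)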
""" - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) direction = proto.Field(proto.ENUM, number=2, enum="Index.Direction",) - project_id = proto.Field(proto.STRING, number=1) - - index_id = proto.Field(proto.STRING, number=3) - - kind = proto.Field(proto.STRING, number=4) - + project_id = proto.Field(proto.STRING, number=1,) + index_id = proto.Field(proto.STRING, number=3,) + kind = proto.Field(proto.STRING, number=4,) ancestor = proto.Field(proto.ENUM, number=5, enum=AncestorMode,) - properties = proto.RepeatedField(proto.MESSAGE, number=6, message=IndexedProperty,) - state = proto.Field(proto.ENUM, number=7, enum=State,) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index a4b5de763013..247eec151abc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.datastore import DatastoreClient +from .services.datastore import DatastoreAsyncClient + from .types.datastore import AllocateIdsRequest from .types.datastore import AllocateIdsResponse from .types.datastore import BeginTransactionRequest @@ -52,8 +53,8 @@ from .types.query import Query from .types.query import QueryResultBatch - __all__ = ( + "DatastoreAsyncClient", "AllocateIdsRequest", "AllocateIdsResponse", "ArrayValue", @@ -62,6 +63,7 @@ "CommitRequest", "CommitResponse", "CompositeFilter", + "DatastoreClient", "Entity", "EntityResult", "Filter", @@ -89,5 +91,4 @@ "RunQueryResponse", "TransactionOptions", "Value", - "DatastoreClient", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json new file mode 100644 index 000000000000..5da47e53b941 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json @@ -0,0 +1,93 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.datastore_v1", + "protoPackage": "google.datastore.v1", + "schema": "1.0", + "services": { + "Datastore": { + "clients": { + "grpc": { + "libraryClient": "DatastoreClient", + "rpcs": { + "AllocateIds": { + "methods": [ + "allocate_ids" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "Lookup": { + "methods": [ + "lookup" + ] + }, + "ReserveIds": { + "methods": [ + "reserve_ids" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DatastoreAsyncClient", + "rpcs": { + "AllocateIds": { + "methods": [ + "allocate_ids" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "Lookup": { + "methods": [ + "lookup" + ] + }, + "ReserveIds": { + "methods": [ + "reserve_ids" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py 
b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py index 42ffdf2bc43d..4de65971c238 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py index a8a828864640..611f280bd6a2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DatastoreClient from .async_client import DatastoreAsyncClient diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 01a2cbee3050..ca6beef27784 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query - from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport from .client import DatastoreClient @@ -58,29 +57,55 @@ class DatastoreAsyncClient: parse_common_billing_account_path = staticmethod( DatastoreClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DatastoreClient.common_folder_path) parse_common_folder_path = staticmethod(DatastoreClient.parse_common_folder_path) - common_organization_path = staticmethod(DatastoreClient.common_organization_path) parse_common_organization_path = staticmethod( DatastoreClient.parse_common_organization_path ) - common_project_path = staticmethod(DatastoreClient.common_project_path) parse_common_project_path = staticmethod(DatastoreClient.parse_common_project_path) - common_location_path = staticmethod(DatastoreClient.common_location_path) parse_common_location_path = staticmethod( DatastoreClient.parse_common_location_path ) - from_service_account_file = DatastoreClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAsyncClient: The constructed client. + """ + return DatastoreClient.from_service_account_info.__func__(DatastoreAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAsyncClient: The constructed client. + """ + return DatastoreClient.from_service_account_file.__func__(DatastoreAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property def transport(self) -> DatastoreTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: DatastoreTransport: The transport used by the client instance. 
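The async client now exposes ``from_service_account_info`` alongside ``from_service_account_file``, both delegating to the sync classmethods via ``__func__`` so the returned instance is the async class. A minimal sketch — the key path is a placeholder, and ``from_service_account_json`` remains as the legacy alias:

    import json
    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreAsyncClient.from_service_account_file(
        "service-account.json"
    )
    # Equivalent, starting from an already-parsed dict:
    with open("service-account.json") as fp:
        info = json.load(fp)
    client = datastore_v1.DatastoreAsyncClient.from_service_account_info(info)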
@@ -94,12 +119,12 @@ def transport(self) -> DatastoreTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DatastoreTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the datastore client. + """Instantiates the datastore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -131,7 +156,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DatastoreClient( credentials=credentials, transport=transport, @@ -141,39 +165,40 @@ def __init__( async def lookup( self, - request: datastore.LookupRequest = None, + request: Union[datastore.LookupRequest, dict] = None, *, project_id: str = None, read_options: datastore.ReadOptions = None, keys: Sequence[entity.Key] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. Args: - request (:class:`~.datastore.LookupRequest`): + request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): The request object. The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. project_id (:class:`str`): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - read_options (:class:`~.datastore.ReadOptions`): + read_options (:class:`google.cloud.datastore_v1.types.ReadOptions`): The options for this lookup request. This corresponds to the ``read_options`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[~.entity.Key]`): + keys (:class:`Sequence[google.cloud.datastore_v1.types.Key]`): Required. Keys of entities to look up. + This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -181,7 +206,7 @@ async def lookup( sent along with the request as metadata. Returns: - ~.datastore.LookupResponse: + google.cloud.datastore_v1.types.LookupResponse: The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. @@ -200,12 +225,10 @@ async def lookup( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if read_options is not None: request.read_options = read_options - if keys: request.keys.extend(keys) @@ -218,8 +241,10 @@ async def lookup( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -233,19 +258,18 @@ async def lookup( async def run_query( self, - request: datastore.RunQueryRequest = None, + request: Union[datastore.RunQueryRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. Args: - request (:class:`~.datastore.RunQueryRequest`): + request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): The request object. The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -253,13 +277,12 @@ async def run_query( sent along with the request as metadata. Returns: - ~.datastore.RunQueryResponse: + google.cloud.datastore_v1.types.RunQueryResponse: The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. """ # Create or coerce a protobuf request object. - request = datastore.RunQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -271,8 +294,10 @@ async def run_query( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -286,26 +311,26 @@ async def run_query( async def begin_transaction( self, - request: datastore.BeginTransactionRequest = None, + request: Union[datastore.BeginTransactionRequest, dict] = None, *, project_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. Args: - request (:class:`~.datastore.BeginTransactionRequest`): + request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): The request object. The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. project_id (:class:`str`): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -313,7 +338,7 @@ async def begin_transaction( sent along with the request as metadata. Returns: - ~.datastore.BeginTransactionResponse: + google.cloud.datastore_v1.types.BeginTransactionResponse: The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. @@ -332,7 +357,6 @@ async def begin_transaction( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id @@ -352,13 +376,13 @@ async def begin_transaction( async def commit( self, - request: datastore.CommitRequest = None, + request: Union[datastore.CommitRequest, dict] = None, *, project_id: str = None, mode: datastore.CommitRequest.Mode = None, transaction: bytes = None, mutations: Sequence[datastore.Mutation] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.CommitResponse: @@ -366,18 +390,20 @@ async def commit( or modifying some entities. Args: - request (:class:`~.datastore.CommitRequest`): + request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): The request object. The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. project_id (:class:`str`): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - mode (:class:`~.datastore.CommitRequest.Mode`): + mode (:class:`google.cloud.datastore_v1.types.CommitRequest.Mode`): The type of commit to perform. Defaults to ``TRANSACTIONAL``. + This corresponds to the ``mode`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -386,10 +412,11 @@ async def commit( commit. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - mutations (:class:`Sequence[~.datastore.Mutation]`): + mutations (:class:`Sequence[google.cloud.datastore_v1.types.Mutation]`): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a @@ -404,10 +431,10 @@ async def commit( When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single entity. + This corresponds to the ``mutations`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -415,7 +442,7 @@ async def commit( sent along with the request as metadata. Returns: - ~.datastore.CommitResponse: + google.cloud.datastore_v1.types.CommitResponse: The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. @@ -434,14 +461,12 @@ async def commit( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if mode is not None: request.mode = mode if transaction is not None: request.transaction = transaction - if mutations: request.mutations.extend(mutations) @@ -461,23 +486,24 @@ async def commit( async def rollback( self, - request: datastore.RollbackRequest = None, + request: Union[datastore.RollbackRequest, dict] = None, *, project_id: str = None, transaction: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. Args: - request (:class:`~.datastore.RollbackRequest`): + request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): The request object. 
The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. project_id (:class:`str`): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -485,10 +511,10 @@ async def rollback( Required. The transaction identifier, returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -496,10 +522,9 @@ async def rollback( sent along with the request as metadata. Returns: - ~.datastore.RollbackResponse: - The response for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - (an empty message). + google.cloud.datastore_v1.types.RollbackResponse: + The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + (an empty message). """ # Create or coerce a protobuf request object. @@ -516,7 +541,6 @@ async def rollback( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if transaction is not None: @@ -538,11 +562,11 @@ async def rollback( async def allocate_ids( self, - request: datastore.AllocateIdsRequest = None, + request: Union[datastore.AllocateIdsRequest, dict] = None, *, project_id: str = None, keys: Sequence[entity.Key] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.AllocateIdsResponse: @@ -550,24 +574,25 @@ async def allocate_ids( referencing an entity before it is inserted. Args: - request (:class:`~.datastore.AllocateIdsRequest`): + request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): The request object. The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. project_id (:class:`str`): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[~.entity.Key]`): + keys (:class:`Sequence[google.cloud.datastore_v1.types.Key]`): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. + This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -575,7 +600,7 @@ async def allocate_ids( sent along with the request as metadata. Returns: - ~.datastore.AllocateIdsResponse: + google.cloud.datastore_v1.types.AllocateIdsResponse: The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. @@ -594,10 +619,8 @@ async def allocate_ids( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id - if keys: request.keys.extend(keys) @@ -617,11 +640,11 @@ async def allocate_ids( async def reserve_ids( self, - request: datastore.ReserveIdsRequest = None, + request: Union[datastore.ReserveIdsRequest, dict] = None, *, project_id: str = None, keys: Sequence[entity.Key] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: @@ -629,23 +652,24 @@ async def reserve_ids( llocated by Cloud Datastore. Args: - request (:class:`~.datastore.ReserveIdsRequest`): + request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): The request object. The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. project_id (:class:`str`): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[~.entity.Key]`): + keys (:class:`Sequence[google.cloud.datastore_v1.types.Key]`): Required. A list of keys with complete key paths whose numeric IDs should not be auto-allocated. + This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -653,7 +677,7 @@ async def reserve_ids( sent along with the request as metadata. Returns: - ~.datastore.ReserveIdsResponse: + google.cloud.datastore_v1.types.ReserveIdsResponse: The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. @@ -672,10 +696,8 @@ async def reserve_ids( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id - if keys: request.keys.extend(keys) @@ -688,8 +710,10 @@ async def reserve_ids( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -701,6 +725,12 @@ async def reserve_ids( # Done; return the response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index e13791581aa4..4c53cc1fa572 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,28 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. 
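The new ``__aenter__``/``__aexit__`` pair, together with ``close()`` on the transports, makes the async client usable as an async context manager that guarantees the gRPC channel is closed on exit. A short sketch, assuming ambient credentials and a placeholder project ID:

    import asyncio
    from google.cloud import datastore_v1

    async def main():
        async with datastore_v1.DatastoreAsyncClient() as client:
            response = await client.begin_transaction(project_id="my-project")
            print(response.transaction)
        # client.transport.close() has been awaited by this point

    asyncio.run(main())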
# - from collections import OrderedDict from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query - from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreGrpcTransport from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport @@ -54,7 +53,7 @@ class DatastoreClientMeta(type): _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[DatastoreTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -84,7 +83,8 @@ class DatastoreClient(metaclass=DatastoreClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,10 +116,27 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -128,7 +145,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + DatastoreClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -138,16 +155,17 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DatastoreTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. 
Returns: - DatastoreTransport: The transport used by the client instance. + DatastoreTransport: The transport used by the client + instance. """ return self._transport @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -160,7 +178,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -171,7 +189,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -182,7 +200,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -193,7 +211,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -207,12 +225,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DatastoreTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the datastore client. + """Instantiates the datastore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -220,10 +238,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DatastoreTransport]): The + transport (Union[str, DatastoreTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -259,21 +277,18 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -285,12 +300,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -305,8 +322,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -316,46 +333,48 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def lookup( self, - request: datastore.LookupRequest = None, + request: Union[datastore.LookupRequest, dict] = None, *, project_id: str = None, read_options: datastore.ReadOptions = None, keys: Sequence[entity.Key] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. Args: - request (:class:`~.datastore.LookupRequest`): + request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): The request object. The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - project_id (:class:`str`): + project_id (str): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - read_options (:class:`~.datastore.ReadOptions`): + read_options (google.cloud.datastore_v1.types.ReadOptions): The options for this lookup request. This corresponds to the ``read_options`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[~.entity.Key]`): + keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. Keys of entities to look up. + This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
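A sketch of the flattened calling convention described here (``project_id``/``read_options``/``keys`` instead of a request object); the project and key are hypothetical:

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()

    key = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task", name="task-1")],
    )

    response = client.lookup(project_id="my-project", keys=[key])
    for result in response.found:
        print(result.entity)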
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -363,7 +382,7 @@ def lookup( sent along with the request as metadata. Returns: - ~.datastore.LookupResponse: + google.cloud.datastore_v1.types.LookupResponse: The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. @@ -384,17 +403,14 @@ def lookup( # there are no flattened fields. if not isinstance(request, datastore.LookupRequest): request = datastore.LookupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if read_options is not None: request.read_options = read_options - - if keys: - request.keys.extend(keys) + if keys is not None: + request.keys = keys # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -408,19 +424,18 @@ def lookup( def run_query( self, - request: datastore.RunQueryRequest = None, + request: Union[datastore.RunQueryRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. Args: - request (:class:`~.datastore.RunQueryRequest`): + request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): The request object. The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -428,13 +443,12 @@ def run_query( sent along with the request as metadata. Returns: - ~.datastore.RunQueryResponse: + google.cloud.datastore_v1.types.RunQueryResponse: The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a datastore.RunQueryRequest. # There's no risk of modifying the input as we've already verified @@ -454,26 +468,26 @@ def run_query( def begin_transaction( self, - request: datastore.BeginTransactionRequest = None, + request: Union[datastore.BeginTransactionRequest, dict] = None, *, project_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. Args: - request (:class:`~.datastore.BeginTransactionRequest`): + request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): The request object. The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - project_id (:class:`str`): + project_id (str): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -481,7 +495,7 @@ def begin_transaction( sent along with the request as metadata. Returns: - ~.datastore.BeginTransactionResponse: + google.cloud.datastore_v1.types.BeginTransactionResponse: The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. 
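A minimal sketch of the flattened call, assuming a hypothetical project; the returned identifier feeds later commit and rollback calls:

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()
    response = client.begin_transaction(project_id="my-project")
    print(response.transaction)  # opaque bytes identifying the transaction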
@@ -502,10 +516,8 @@ def begin_transaction( # there are no flattened fields. if not isinstance(request, datastore.BeginTransactionRequest): request = datastore.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id @@ -521,13 +533,13 @@ def begin_transaction( def commit( self, - request: datastore.CommitRequest = None, + request: Union[datastore.CommitRequest, dict] = None, *, project_id: str = None, mode: datastore.CommitRequest.Mode = None, transaction: bytes = None, mutations: Sequence[datastore.Mutation] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.CommitResponse: @@ -535,30 +547,33 @@ def commit( or modifying some entities. Args: - request (:class:`~.datastore.CommitRequest`): + request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): The request object. The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - project_id (:class:`str`): + project_id (str): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - mode (:class:`~.datastore.CommitRequest.Mode`): + mode (google.cloud.datastore_v1.types.CommitRequest.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. + This corresponds to the ``mode`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - transaction (:class:`bytes`): + transaction (bytes): The identifier of the transaction associated with the commit. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - mutations (:class:`Sequence[~.datastore.Mutation]`): + mutations (Sequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a @@ -573,10 +588,10 @@ def commit( When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single entity. + This corresponds to the ``mutations`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -584,7 +599,7 @@ def commit( sent along with the request as metadata. Returns: - ~.datastore.CommitResponse: + google.cloud.datastore_v1.types.CommitResponse: The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. @@ -605,19 +620,16 @@ def commit( # there are no flattened fields. if not isinstance(request, datastore.CommitRequest): request = datastore.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if mode is not None: request.mode = mode if transaction is not None: request.transaction = transaction - - if mutations: - request.mutations.extend(mutations) + if mutations is not None: + request.mutations = mutations # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
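A hedged sketch of a non-transactional commit that upserts a single entity; the project, kind, and property are hypothetical:

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()

    task = datastore_v1.Entity(
        key=datastore_v1.Key(
            partition_id=datastore_v1.PartitionId(project_id="my-project"),
            path=[datastore_v1.Key.PathElement(kind="Task", name="task-1")],
        ),
        properties={"done": datastore_v1.Value(boolean_value=False)},
    )

    response = client.commit(
        project_id="my-project",
        mode=datastore_v1.CommitRequest.Mode.NON_TRANSACTIONAL,
        mutations=[datastore_v1.Mutation(upsert=task)],
    )
    print(response.index_updates)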
@@ -631,34 +643,35 @@ def commit( def rollback( self, - request: datastore.RollbackRequest = None, + request: Union[datastore.RollbackRequest, dict] = None, *, project_id: str = None, transaction: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. Args: - request (:class:`~.datastore.RollbackRequest`): + request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): The request object. The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - project_id (:class:`str`): + project_id (str): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - transaction (:class:`bytes`): + transaction (bytes): Required. The transaction identifier, returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -666,10 +679,9 @@ def rollback( sent along with the request as metadata. Returns: - ~.datastore.RollbackResponse: - The response for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - (an empty message). + google.cloud.datastore_v1.types.RollbackResponse: + The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + (an empty message). """ # Create or coerce a protobuf request object. @@ -688,10 +700,8 @@ def rollback( # there are no flattened fields. if not isinstance(request, datastore.RollbackRequest): request = datastore.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if transaction is not None: @@ -709,11 +719,11 @@ def rollback( def allocate_ids( self, - request: datastore.AllocateIdsRequest = None, + request: Union[datastore.AllocateIdsRequest, dict] = None, *, project_id: str = None, keys: Sequence[entity.Key] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.AllocateIdsResponse: @@ -721,24 +731,25 @@ def allocate_ids( referencing an entity before it is inserted. Args: - request (:class:`~.datastore.AllocateIdsRequest`): + request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): The request object. The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - project_id (:class:`str`): + project_id (str): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[~.entity.Key]`): + keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. + This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
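A sketch of allocating IDs for an incomplete key (a path element with a kind but no id or name), assuming a hypothetical project:

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()

    incomplete = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task")],
    )
    response = client.allocate_ids(project_id="my-project", keys=[incomplete])
    print(response.keys[0].path[0].id)  # the newly allocated numeric ID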
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -746,7 +757,7 @@ def allocate_ids( sent along with the request as metadata. Returns: - ~.datastore.AllocateIdsResponse: + google.cloud.datastore_v1.types.AllocateIdsResponse: The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. @@ -767,15 +778,12 @@ def allocate_ids( # there are no flattened fields. if not isinstance(request, datastore.AllocateIdsRequest): request = datastore.AllocateIdsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id - - if keys: - request.keys.extend(keys) + if keys is not None: + request.keys = keys # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -789,11 +797,11 @@ def allocate_ids( def reserve_ids( self, - request: datastore.ReserveIdsRequest = None, + request: Union[datastore.ReserveIdsRequest, dict] = None, *, project_id: str = None, keys: Sequence[entity.Key] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: @@ -801,23 +809,24 @@ def reserve_ids( llocated by Cloud Datastore. Args: - request (:class:`~.datastore.ReserveIdsRequest`): + request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): The request object. The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - project_id (:class:`str`): + project_id (str): Required. The ID of the project against which to make the request. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[~.entity.Key]`): + keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key paths whose numeric IDs should not be auto-allocated. + This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -825,7 +834,7 @@ def reserve_ids( sent along with the request as metadata. Returns: - ~.datastore.ReserveIdsResponse: + google.cloud.datastore_v1.types.ReserveIdsResponse: The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. @@ -846,15 +855,12 @@ def reserve_ids( # there are no flattened fields. if not isinstance(request, datastore.ReserveIdsRequest): request = datastore.ReserveIdsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id - - if keys: - request.keys.extend(keys) + if keys is not None: + request.keys = keys # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -866,6 +872,19 @@ def reserve_ids( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
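Given that warning, a hedged usage sketch of the new context-manager support, suitable only for a client whose transport is not shared:

    from google.cloud import datastore_v1

    with datastore_v1.DatastoreClient() as client:
        response = client.begin_transaction(project_id="my-project")
    # The transport (and its gRPC channel) is closed once the block exits.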
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 2d0659d9b786..41074a07ccc3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type @@ -28,7 +26,6 @@ _transport_registry["grpc"] = DatastoreGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport - __all__ = ( "DatastoreTransport", "DatastoreGrpcTransport", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index ad00b33f5f8f..7959b72ed8ef 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.datastore_v1.types import datastore - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, @@ -44,21 +43,25 @@ class DatastoreTransport(abc.ABC): "https://www.googleapis.com/auth/datastore", ) + DEFAULT_HOST: str = "datastore.googleapis.com" + def __init__( self, *, - host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. 
+ host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -67,43 +70,55 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" self._host = host + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
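A hedged sketch of the self-signed JWT upgrade the base transport performs, assuming a hypothetical service-account key file and a google-auth version that supports it:

    from google.oauth2 import service_account

    creds = service_account.Credentials.from_service_account_file(
        "service-account.json",  # hypothetical path
        scopes=["https://www.googleapis.com/auth/datastore"],
    )
    if hasattr(type(creds), "with_always_use_jwt_access"):
        # Mint self-signed JWTs instead of exchanging for an OAuth token.
        creds = creds.with_always_use_jwt_access(True)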
self._wrapped_methods = { @@ -114,8 +129,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -127,8 +144,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -152,44 +171,51 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def lookup( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.LookupRequest], - typing.Union[ - datastore.LookupResponse, typing.Awaitable[datastore.LookupResponse] - ], + Union[datastore.LookupResponse, Awaitable[datastore.LookupResponse]], ]: raise NotImplementedError() @property def run_query( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.RunQueryRequest], - typing.Union[ - datastore.RunQueryResponse, typing.Awaitable[datastore.RunQueryResponse] - ], + Union[datastore.RunQueryResponse, Awaitable[datastore.RunQueryResponse]], ]: raise NotImplementedError() @property def begin_transaction( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.BeginTransactionRequest], - typing.Union[ + Union[ datastore.BeginTransactionResponse, - typing.Awaitable[datastore.BeginTransactionResponse], + Awaitable[datastore.BeginTransactionResponse], ], ]: raise NotImplementedError() @@ -197,45 +223,36 @@ def begin_transaction( @property def commit( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.CommitRequest], - typing.Union[ - datastore.CommitResponse, typing.Awaitable[datastore.CommitResponse] - ], + Union[datastore.CommitResponse, Awaitable[datastore.CommitResponse]], ]: raise NotImplementedError() @property def rollback( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.RollbackRequest], - typing.Union[ - datastore.RollbackResponse, typing.Awaitable[datastore.RollbackResponse] - ], + Union[datastore.RollbackResponse, Awaitable[datastore.RollbackResponse]], ]: raise NotImplementedError() @property def allocate_ids( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.AllocateIdsRequest], - typing.Union[ - datastore.AllocateIdsResponse, - typing.Awaitable[datastore.AllocateIdsResponse], - ], + Union[datastore.AllocateIdsResponse, Awaitable[datastore.AllocateIdsResponse]], ]: raise NotImplementedError() @property def reserve_ids( self, - ) -> typing.Callable[ + ) -> Callable[ [datastore.ReserveIdsRequest], - typing.Union[ - datastore.ReserveIdsResponse, typing.Awaitable[datastore.ReserveIdsResponse] - ], + Union[datastore.ReserveIdsResponse, Awaitable[datastore.ReserveIdsResponse]], ]: raise NotImplementedError() diff --git 
a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 7d170570bb82..afcc6a15fabf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.datastore_v1.types import datastore - from .base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -56,20 +53,23 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -86,13 +86,17 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
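As the parameter descriptions note, a provided ``channel`` short-circuits credential and mTLS handling entirely; a sketch pointing the transport at a local emulator (hypothetical address):

    import grpc

    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.services.datastore.transports import (
        DatastoreGrpcTransport,
    )

    channel = grpc.insecure_channel("localhost:8081")  # hypothetical emulator
    transport = DatastoreGrpcTransport(channel=channel)
    client = datastore_v1.DatastoreClient(transport=transport)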
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -100,6 +104,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -107,88 +113,76 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - self._stubs = {} # type: Dict[str, Callable] + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. 
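A hedged sketch of the new ``client_cert_source_for_mtls`` hook on the transport, with hypothetical PEM paths; it is ignored when ``channel`` or ``ssl_channel_credentials`` is supplied:

    from google.cloud.datastore_v1.services.datastore.transports import (
        DatastoreGrpcTransport,
    )

    def cert_source():
        with open("client-cert.pem", "rb") as c, open("client-key.pem", "rb") as k:
            return c.read(), k.read()

    # Assumes default credentials are available in the environment.
    transport = DatastoreGrpcTransport(
        host="datastore.mtls.googleapis.com",
        client_cert_source_for_mtls=cert_source,
    )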
+ # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + @classmethod def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -196,7 +190,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -219,13 +213,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -418,5 +414,8 @@ def reserve_ids( ) return self._stubs["reserve_ids"] + def close(self): + self.grpc_channel.close() + __all__ = ("DatastoreGrpcTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 8ba5f66dbaa9..20c51f7c6446 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datastore_v1.types import datastore - from .base import DatastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreGrpcTransport @@ -59,7 +55,7 @@ class DatastoreGrpcAsyncIOTransport(DatastoreTransport): def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -67,7 +63,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -86,13 +82,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -100,20 +98,23 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -131,20 +132,26 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. 
It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -152,82 +159,69 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
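Since this mirrors the sync transport, a hedged end-to-end sketch through the asyncio client generated alongside it (the client name is assumed from the standard GAPIC layout):

    import asyncio

    from google.cloud import datastore_v1

    async def main():
        client = datastore_v1.DatastoreAsyncClient()  # grpc_asyncio transport
        response = await client.begin_transaction(project_id="my-project")
        print(response.transaction)

    asyncio.run(main())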
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. + # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) - self._stubs = {} + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: @@ -431,5 +425,8 @@ def reserve_ids( ) return self._stubs["reserve_ids"] + def close(self): + return self.grpc_channel.close() + __all__ = ("DatastoreGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index 2148caa00287..7553ac77cc68 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,84 +13,82 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - +from .datastore import ( + AllocateIdsRequest, + AllocateIdsResponse, + BeginTransactionRequest, + BeginTransactionResponse, + CommitRequest, + CommitResponse, + LookupRequest, + LookupResponse, + Mutation, + MutationResult, + ReadOptions, + ReserveIdsRequest, + ReserveIdsResponse, + RollbackRequest, + RollbackResponse, + RunQueryRequest, + RunQueryResponse, + TransactionOptions, +) from .entity import ( - PartitionId, - Key, ArrayValue, - Value, Entity, + Key, + PartitionId, + Value, ) from .query import ( + CompositeFilter, EntityResult, - Query, - KindExpression, - PropertyReference, - Projection, - PropertyOrder, Filter, - CompositeFilter, - PropertyFilter, GqlQuery, GqlQueryParameter, + KindExpression, + Projection, + PropertyFilter, + PropertyOrder, + PropertyReference, + Query, QueryResultBatch, ) -from .datastore import ( - LookupRequest, - LookupResponse, - RunQueryRequest, - RunQueryResponse, - BeginTransactionRequest, - BeginTransactionResponse, - RollbackRequest, - RollbackResponse, - CommitRequest, - CommitResponse, - AllocateIdsRequest, - AllocateIdsResponse, - ReserveIdsRequest, - ReserveIdsResponse, - Mutation, - MutationResult, - ReadOptions, - TransactionOptions, -) - __all__ = ( - "PartitionId", - "Key", - "ArrayValue", - "Value", - "Entity", - "EntityResult", - "Query", - "KindExpression", - "PropertyReference", - "Projection", - "PropertyOrder", - "Filter", - "CompositeFilter", - "PropertyFilter", - "GqlQuery", - "GqlQueryParameter", - "QueryResultBatch", - "LookupRequest", - "LookupResponse", - "RunQueryRequest", - "RunQueryResponse", + "AllocateIdsRequest", + "AllocateIdsResponse", "BeginTransactionRequest", "BeginTransactionResponse", - "RollbackRequest", - "RollbackResponse", "CommitRequest", "CommitResponse", - "AllocateIdsRequest", - "AllocateIdsResponse", - "ReserveIdsRequest", - "ReserveIdsResponse", + "LookupRequest", + "LookupResponse", "Mutation", "MutationResult", "ReadOptions", + "ReserveIdsRequest", + "ReserveIdsResponse", + "RollbackRequest", + "RollbackResponse", + "RunQueryRequest", + "RunQueryResponse", "TransactionOptions", + "ArrayValue", + "Entity", + "Key", + "PartitionId", + "Value", + "CompositeFilter", + "EntityResult", + "Filter", + "GqlQuery", + "GqlQueryParameter", + "KindExpression", + "Projection", + "PropertyFilter", + "PropertyOrder", + "PropertyReference", + "Query", + "QueryResultBatch", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index e1124457e707..a36a7293c64c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query @@ -55,16 +52,14 @@ class LookupRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. - read_options (~.datastore.ReadOptions): + read_options (google.cloud.datastore_v1.types.ReadOptions): The options for this lookup request. - keys (Sequence[~.entity.Key]): + keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. 
Keys of entities to look up. """ - project_id = proto.Field(proto.STRING, number=8) - + project_id = proto.Field(proto.STRING, number=8,) read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) - keys = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) @@ -73,15 +68,15 @@ class LookupResponse(proto.Message): [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. Attributes: - found (Sequence[~.gd_query.EntityResult]): + found (Sequence[google.cloud.datastore_v1.types.EntityResult]): Entities found as ``ResultType.FULL`` entities. The order of results in this field is undefined and has no relation to the order of the keys in the input. - missing (Sequence[~.gd_query.EntityResult]): + missing (Sequence[google.cloud.datastore_v1.types.EntityResult]): Entities not found as ``ResultType.KEY_ONLY`` entities. The order of results in this field is undefined and has no relation to the order of the keys in the input. - deferred (Sequence[~.entity.Key]): + deferred (Sequence[google.cloud.datastore_v1.types.Key]): A list of keys that were not looked up due to resource constraints. The order of results in this field is undefined and has no relation to @@ -89,11 +84,9 @@ class LookupResponse(proto.Message): """ found = proto.RepeatedField(proto.MESSAGE, number=1, message=gd_query.EntityResult,) - missing = proto.RepeatedField( proto.MESSAGE, number=2, message=gd_query.EntityResult, ) - deferred = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) @@ -101,34 +94,39 @@ class RunQueryRequest(proto.Message): r"""The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: project_id (str): Required. The ID of the project against which to make the request. - partition_id (~.entity.PartitionId): + partition_id (google.cloud.datastore_v1.types.PartitionId): Entities are partitioned into subsets, identified by a partition ID. Queries are scoped to a single partition. This partition ID is normalized with the standard default context partition ID. - read_options (~.datastore.ReadOptions): + read_options (google.cloud.datastore_v1.types.ReadOptions): The options for this query. - query (~.gd_query.Query): + query (google.cloud.datastore_v1.types.Query): The query to run. - gql_query (~.gd_query.GqlQuery): + This field is a member of `oneof`_ ``query_type``. + gql_query (google.cloud.datastore_v1.types.GqlQuery): The GQL query to run. + This field is a member of `oneof`_ ``query_type``. """ - project_id = proto.Field(proto.STRING, number=8) - + project_id = proto.Field(proto.STRING, number=8,) partition_id = proto.Field(proto.MESSAGE, number=2, message=entity.PartitionId,) - read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) - query = proto.Field( proto.MESSAGE, number=3, oneof="query_type", message=gd_query.Query, ) - gql_query = proto.Field( proto.MESSAGE, number=7, oneof="query_type", message=gd_query.GqlQuery, ) @@ -139,15 +137,14 @@ class RunQueryResponse(proto.Message): [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. Attributes: - batch (~.gd_query.QueryResultBatch): + batch (google.cloud.datastore_v1.types.QueryResultBatch): A batch of query results (always present). 
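A sketch of the ``query_type`` oneof documented above: assigning one member clears the other, per the proto-plus semantics referenced in the docstring (hypothetical project and kind):

    from google.cloud import datastore_v1

    request = datastore_v1.RunQueryRequest(
        project_id="my-project",
        query=datastore_v1.Query(kind=[datastore_v1.KindExpression(name="Task")]),
    )

    request.gql_query = datastore_v1.GqlQuery(query_string="SELECT * FROM Task")
    # ``query`` is now cleared; only one member of ``query_type`` can be set.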
- query (~.gd_query.Query): + query (google.cloud.datastore_v1.types.Query): The parsed form of the ``GqlQuery`` from the request, if it was set. """ batch = proto.Field(proto.MESSAGE, number=1, message=gd_query.QueryResultBatch,) - query = proto.Field(proto.MESSAGE, number=2, message=gd_query.Query,) @@ -159,12 +156,11 @@ class BeginTransactionRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. - transaction_options (~.datastore.TransactionOptions): + transaction_options (google.cloud.datastore_v1.types.TransactionOptions): Options for a new transaction. """ - project_id = proto.Field(proto.STRING, number=8) - + project_id = proto.Field(proto.STRING, number=8,) transaction_options = proto.Field( proto.MESSAGE, number=10, message="TransactionOptions", ) @@ -179,7 +175,7 @@ class BeginTransactionResponse(proto.Message): The transaction identifier (always present). """ - transaction = proto.Field(proto.BYTES, number=1) + transaction = proto.Field(proto.BYTES, number=1,) class RollbackRequest(proto.Message): @@ -195,15 +191,15 @@ class RollbackRequest(proto.Message): [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. """ - project_id = proto.Field(proto.STRING, number=8) - - transaction = proto.Field(proto.BYTES, number=1) + project_id = proto.Field(proto.STRING, number=8,) + transaction = proto.Field(proto.BYTES, number=1,) class RollbackResponse(proto.Message): r"""The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an empty message). + """ @@ -211,18 +207,22 @@ class CommitRequest(proto.Message): r"""The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: project_id (str): Required. The ID of the project against which to make the request. - mode (~.datastore.CommitRequest.Mode): + mode (google.cloud.datastore_v1.types.CommitRequest.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. transaction (bytes): The identifier of the transaction associated with the commit. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - mutations (Sequence[~.datastore.Mutation]): + This field is a member of `oneof`_ ``transaction_selector``. + mutations (Sequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a single @@ -245,12 +245,9 @@ class Mode(proto.Enum): TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 - project_id = proto.Field(proto.STRING, number=8) - + project_id = proto.Field(proto.STRING, number=8,) mode = proto.Field(proto.ENUM, number=5, enum=Mode,) - - transaction = proto.Field(proto.BYTES, number=1, oneof="transaction_selector") - + transaction = proto.Field(proto.BYTES, number=1, oneof="transaction_selector",) mutations = proto.RepeatedField(proto.MESSAGE, number=6, message="Mutation",) @@ -259,7 +256,7 @@ class CommitResponse(proto.Message): [Datastore.Commit][google.datastore.v1.Datastore.Commit]. Attributes: - mutation_results (Sequence[~.datastore.MutationResult]): + mutation_results (Sequence[google.cloud.datastore_v1.types.MutationResult]): The result of performing the mutations. The i-th mutation result corresponds to the i-th mutation in the request. 
@@ -271,8 +268,7 @@ class CommitResponse(proto.Message): mutation_results = proto.RepeatedField( proto.MESSAGE, number=3, message="MutationResult", ) - - index_updates = proto.Field(proto.INT32, number=4) + index_updates = proto.Field(proto.INT32, number=4,) class AllocateIdsRequest(proto.Message): @@ -283,14 +279,13 @@ class AllocateIdsRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. - keys (Sequence[~.entity.Key]): + keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. """ - project_id = proto.Field(proto.STRING, number=8) - + project_id = proto.Field(proto.STRING, number=8,) keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) @@ -299,7 +294,7 @@ class AllocateIdsResponse(proto.Message): [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. Attributes: - keys (Sequence[~.entity.Key]): + keys (Sequence[google.cloud.datastore_v1.types.Key]): The keys specified in the request (in the same order), each with its key path completed with a newly allocated ID. @@ -319,70 +314,77 @@ class ReserveIdsRequest(proto.Message): database_id (str): If not empty, the ID of the database against which to make the request. - keys (Sequence[~.entity.Key]): + keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key paths whose numeric IDs should not be auto- allocated. """ - project_id = proto.Field(proto.STRING, number=8) - - database_id = proto.Field(proto.STRING, number=9) - + project_id = proto.Field(proto.STRING, number=8,) + database_id = proto.Field(proto.STRING, number=9,) keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) class ReserveIdsResponse(proto.Message): r"""The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + """ class Mutation(proto.Message): r"""A mutation to apply to an entity. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: - insert (~.entity.Entity): + insert (google.cloud.datastore_v1.types.Entity): The entity to insert. The entity must not already exist. The entity key's final path element may be incomplete. - update (~.entity.Entity): + This field is a member of `oneof`_ ``operation``. + update (google.cloud.datastore_v1.types.Entity): The entity to update. The entity must already exist. Must have a complete key path. - upsert (~.entity.Entity): + This field is a member of `oneof`_ ``operation``. + upsert (google.cloud.datastore_v1.types.Entity): The entity to upsert. The entity may or may not already exist. The entity key's final path element may be incomplete. - delete (~.entity.Key): + This field is a member of `oneof`_ ``operation``. + delete (google.cloud.datastore_v1.types.Key): The key of the entity to delete. The entity may or may not already exist. Must have a complete key path and must not be reserved/read- only. + This field is a member of `oneof`_ ``operation``. base_version (int): The version of the entity that this mutation is being applied to. If this does not match the current version on the server, the mutation conflicts. 
+ This field is a member of `oneof`_ ``conflict_detection_strategy``. """ insert = proto.Field( proto.MESSAGE, number=4, oneof="operation", message=entity.Entity, ) - update = proto.Field( proto.MESSAGE, number=5, oneof="operation", message=entity.Entity, ) - upsert = proto.Field( proto.MESSAGE, number=6, oneof="operation", message=entity.Entity, ) - delete = proto.Field( proto.MESSAGE, number=7, oneof="operation", message=entity.Key, ) - base_version = proto.Field( - proto.INT64, number=8, oneof="conflict_detection_strategy" + proto.INT64, number=8, oneof="conflict_detection_strategy", ) @@ -390,7 +392,7 @@ class MutationResult(proto.Message): r"""The result of applying a mutation. Attributes: - key (~.entity.Key): + key (google.cloud.datastore_v1.types.Key): The automatically allocated key. Set only when the mutation allocated a key. version (int): @@ -409,23 +411,30 @@ class MutationResult(proto.Message): """ key = proto.Field(proto.MESSAGE, number=3, message=entity.Key,) - - version = proto.Field(proto.INT64, number=4) - - conflict_detected = proto.Field(proto.BOOL, number=5) + version = proto.Field(proto.INT64, number=4,) + conflict_detected = proto.Field(proto.BOOL, number=5,) class ReadOptions(proto.Message): r"""The options shared by read requests. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: - read_consistency (~.datastore.ReadOptions.ReadConsistency): + read_consistency (google.cloud.datastore_v1.types.ReadOptions.ReadConsistency): The non-transactional read consistency to use. Cannot be set to ``STRONG`` for global queries. + This field is a member of `oneof`_ ``consistency_type``. transaction (bytes): The identifier of the transaction in which to read. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. """ class ReadConsistency(proto.Enum): @@ -437,8 +446,7 @@ class ReadConsistency(proto.Enum): read_consistency = proto.Field( proto.ENUM, number=1, oneof="consistency_type", enum=ReadConsistency, ) - - transaction = proto.Field(proto.BYTES, number=2, oneof="consistency_type") + transaction = proto.Field(proto.BYTES, number=2, oneof="consistency_type",) class TransactionOptions(proto.Message): @@ -450,12 +458,21 @@ class TransactionOptions(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] in read requests. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: - read_write (~.datastore.TransactionOptions.ReadWrite): + read_write (google.cloud.datastore_v1.types.TransactionOptions.ReadWrite): The transaction should allow both reads and writes. - read_only (~.datastore.TransactionOptions.ReadOnly): + This field is a member of `oneof`_ ``mode``. + read_only (google.cloud.datastore_v1.types.TransactionOptions.ReadOnly): The transaction should only allow reads. + This field is a member of `oneof`_ ``mode``. 
""" class ReadWrite(proto.Message): @@ -467,13 +484,13 @@ class ReadWrite(proto.Message): being retried. """ - previous_transaction = proto.Field(proto.BYTES, number=1) + previous_transaction = proto.Field(proto.BYTES, number=1,) class ReadOnly(proto.Message): - r"""Options specific to read-only transactions.""" + r"""Options specific to read-only transactions. + """ read_write = proto.Field(proto.MESSAGE, number=1, oneof="mode", message=ReadWrite,) - read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index cc1be6e2aab1..8ff844f7bda7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.type import latlng_pb2 as latlng # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore __protobuf__ = proto.module( @@ -59,9 +56,8 @@ class PartitionId(proto.Message): which the entities belong. """ - project_id = proto.Field(proto.STRING, number=2) - - namespace_id = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=2,) + namespace_id = proto.Field(proto.STRING, number=4,) class Key(proto.Message): @@ -72,12 +68,12 @@ class Key(proto.Message): contexts. Attributes: - partition_id (~.entity.PartitionId): + partition_id (google.cloud.datastore_v1.types.PartitionId): Entities are partitioned into subsets, currently identified by a project ID and namespace ID. Queries are scoped to a single partition. - path (Sequence[~.entity.Key.PathElement]): + path (Sequence[google.cloud.datastore_v1.types.Key.PathElement]): The entity path. An entity path consists of one or more elements composed of a kind and a string or numerical identifier, which identify entities. The first element @@ -104,6 +100,13 @@ class PathElement(proto.Message): If either name or ID is set, the element is complete. If neither is set, the element is incomplete. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: kind (str): The kind of the entity. A kind matching regex ``__.*__`` is @@ -114,20 +117,19 @@ class PathElement(proto.Message): Never equal to zero. Values less than zero are discouraged and may not be supported in the future. + This field is a member of `oneof`_ ``id_type``. name (str): The name of the entity. A name matching regex ``__.*__`` is reserved/read-only. A name must not be more than 1500 bytes when UTF-8 encoded. Cannot be ``""``. + This field is a member of `oneof`_ ``id_type``. 
""" - kind = proto.Field(proto.STRING, number=1) - - id = proto.Field(proto.INT64, number=2, oneof="id_type") - - name = proto.Field(proto.STRING, number=3, oneof="id_type") + kind = proto.Field(proto.STRING, number=1,) + id = proto.Field(proto.INT64, number=2, oneof="id_type",) + name = proto.Field(proto.STRING, number=3, oneof="id_type",) partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) @@ -135,7 +137,7 @@ class ArrayValue(proto.Message): r"""An array value. Attributes: - values (Sequence[~.entity.Value]): + values (Sequence[google.cloud.datastore_v1.types.Value]): Values in the array. The order of values in an array is preserved as long as all values have identical settings for 'exclude_from_indexes'. @@ -148,42 +150,60 @@ class Value(proto.Message): r"""A message that can hold any of the supported value types and associated metadata. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: - null_value (~.struct.NullValue): + null_value (google.protobuf.struct_pb2.NullValue): A null value. + This field is a member of `oneof`_ ``value_type``. boolean_value (bool): A boolean value. + This field is a member of `oneof`_ ``value_type``. integer_value (int): An integer value. + This field is a member of `oneof`_ ``value_type``. double_value (float): A double value. - timestamp_value (~.timestamp.Timestamp): + This field is a member of `oneof`_ ``value_type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): A timestamp value. When stored in the Datastore, precise only to microseconds; any additional precision is rounded down. - key_value (~.entity.Key): + This field is a member of `oneof`_ ``value_type``. + key_value (google.cloud.datastore_v1.types.Key): A key value. + This field is a member of `oneof`_ ``value_type``. string_value (str): A UTF-8 encoded string value. When ``exclude_from_indexes`` is false (it is indexed), may have at most 1500 bytes. Otherwise, may be set to at most 1,000,000 bytes. + This field is a member of `oneof`_ ``value_type``. blob_value (bytes): A blob value. May have at most 1,000,000 bytes. When ``exclude_from_indexes`` is false, may have at most 1500 bytes. In JSON requests, must be base64-encoded. - geo_point_value (~.latlng.LatLng): + This field is a member of `oneof`_ ``value_type``. + geo_point_value (google.type.latlng_pb2.LatLng): A geo point value representing a point on the surface of Earth. - entity_value (~.entity.Entity): + This field is a member of `oneof`_ ``value_type``. + entity_value (google.cloud.datastore_v1.types.Entity): An entity value. - May have no key. - May have a key with an incomplete key path. - May have a reserved/read-only key. - array_value (~.entity.ArrayValue): + This field is a member of `oneof`_ ``value_type``. + array_value (google.cloud.datastore_v1.types.ArrayValue): An array value. Cannot contain another array value. A ``Value`` instance that sets field ``array_value`` must not set fields ``meaning`` or ``exclude_from_indexes``. + This field is a member of `oneof`_ ``value_type``. meaning (int): The ``meaning`` field should only be populated for backwards compatibility. 
@@ -193,40 +213,28 @@ class Value(proto.Message): """ null_value = proto.Field( - proto.ENUM, number=11, oneof="value_type", enum=struct.NullValue, + proto.ENUM, number=11, oneof="value_type", enum=struct_pb2.NullValue, ) - - boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type") - - integer_value = proto.Field(proto.INT64, number=2, oneof="value_type") - - double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type") - + boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type",) + integer_value = proto.Field(proto.INT64, number=2, oneof="value_type",) + double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type",) timestamp_value = proto.Field( - proto.MESSAGE, number=10, oneof="value_type", message=timestamp.Timestamp, + proto.MESSAGE, number=10, oneof="value_type", message=timestamp_pb2.Timestamp, ) - key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message="Key",) - - string_value = proto.Field(proto.STRING, number=17, oneof="value_type") - - blob_value = proto.Field(proto.BYTES, number=18, oneof="value_type") - + string_value = proto.Field(proto.STRING, number=17, oneof="value_type",) + blob_value = proto.Field(proto.BYTES, number=18, oneof="value_type",) geo_point_value = proto.Field( - proto.MESSAGE, number=8, oneof="value_type", message=latlng.LatLng, + proto.MESSAGE, number=8, oneof="value_type", message=latlng_pb2.LatLng, ) - entity_value = proto.Field( proto.MESSAGE, number=6, oneof="value_type", message="Entity", ) - array_value = proto.Field( proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", ) - - meaning = proto.Field(proto.INT32, number=14) - - exclude_from_indexes = proto.Field(proto.BOOL, number=19) + meaning = proto.Field(proto.INT32, number=14,) + exclude_from_indexes = proto.Field(proto.BOOL, number=19,) class Entity(proto.Message): @@ -237,14 +245,14 @@ class Entity(proto.Message): message. Attributes: - key (~.entity.Key): + key (google.cloud.datastore_v1.types.Key): The entity's key. An entity must have a key, unless otherwise documented (for example, an entity in ``Value.entity_value`` may have no key). An entity's kind is its key path's last element's kind, or null if it has no key. - properties (Sequence[~.entity.Entity.PropertiesEntry]): + properties (Sequence[google.cloud.datastore_v1.types.Entity.PropertiesEntry]): The entity's properties. The map's keys are property names. A property name matching regex ``__.*__`` is reserved. A reserved property name is forbidden in certain documented @@ -253,7 +261,6 @@ class Entity(proto.Message): """ key = proto.Field(proto.MESSAGE, number=1, message="Key",) - properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="Value",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 173626b06f2f..1c69e89fae22 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - from google.cloud.datastore_v1.types import entity as gd_entity -from google.protobuf import wrappers_pb2 as wrappers # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore __protobuf__ = proto.module( @@ -45,7 +42,7 @@ class EntityResult(proto.Message): r"""The result of fetching an entity from Datastore. Attributes: - entity (~.gd_entity.Entity): + entity (google.cloud.datastore_v1.types.Entity): The resulting entity. version (int): The version of the entity, a strictly positive number that @@ -79,29 +76,27 @@ class ResultType(proto.Enum): KEY_ONLY = 3 entity = proto.Field(proto.MESSAGE, number=1, message=gd_entity.Entity,) - - version = proto.Field(proto.INT64, number=4) - - cursor = proto.Field(proto.BYTES, number=3) + version = proto.Field(proto.INT64, number=4,) + cursor = proto.Field(proto.BYTES, number=3,) class Query(proto.Message): r"""A query for entities. Attributes: - projection (Sequence[~.query.Projection]): + projection (Sequence[google.cloud.datastore_v1.types.Projection]): The projection to return. Defaults to returning all properties. - kind (Sequence[~.query.KindExpression]): + kind (Sequence[google.cloud.datastore_v1.types.KindExpression]): The kinds to query (if empty, returns entities of all kinds). Currently at most 1 kind may be specified. - filter (~.query.Filter): + filter (google.cloud.datastore_v1.types.Filter): The filter to apply. - order (Sequence[~.query.PropertyOrder]): + order (Sequence[google.cloud.datastore_v1.types.PropertyOrder]): The order to apply to the query results (if empty, order is unspecified). - distinct_on (Sequence[~.query.PropertyReference]): + distinct_on (Sequence[google.cloud.datastore_v1.types.PropertyReference]): The properties to make distinct. The query results will contain the first result for each distinct combination of values for the given @@ -120,7 +115,7 @@ class Query(proto.Message): The number of results to skip. Applies before limit, but after all other constraints. Optional. Must be >= 0 if specified. - limit (~.wrappers.Int32Value): + limit (google.protobuf.wrappers_pb2.Int32Value): The maximum number of results to return. Applies after all other constraints. Optional. Unspecified is interpreted as no limit. @@ -128,24 +123,16 @@ class Query(proto.Message): """ projection = proto.RepeatedField(proto.MESSAGE, number=2, message="Projection",) - kind = proto.RepeatedField(proto.MESSAGE, number=3, message="KindExpression",) - filter = proto.Field(proto.MESSAGE, number=4, message="Filter",) - order = proto.RepeatedField(proto.MESSAGE, number=5, message="PropertyOrder",) - distinct_on = proto.RepeatedField( proto.MESSAGE, number=6, message="PropertyReference", ) - - start_cursor = proto.Field(proto.BYTES, number=7) - - end_cursor = proto.Field(proto.BYTES, number=8) - - offset = proto.Field(proto.INT32, number=10) - - limit = proto.Field(proto.MESSAGE, number=12, message=wrappers.Int32Value,) + start_cursor = proto.Field(proto.BYTES, number=7,) + end_cursor = proto.Field(proto.BYTES, number=8,) + offset = proto.Field(proto.INT32, number=10,) + limit = proto.Field(proto.MESSAGE, number=12, message=wrappers_pb2.Int32Value,) class KindExpression(proto.Message): @@ -156,7 +143,7 @@ class KindExpression(proto.Message): The name of the kind. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class PropertyReference(proto.Message): @@ -169,14 +156,14 @@ class PropertyReference(proto.Message): a property name path. 
""" - name = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=2,) class Projection(proto.Message): r"""A representation of a property in a projection. Attributes: - property (~.query.PropertyReference): + property (google.cloud.datastore_v1.types.PropertyReference): The property to project. """ @@ -187,9 +174,9 @@ class PropertyOrder(proto.Message): r"""The desired order for a specific property. Attributes: - property (~.query.PropertyReference): + property (google.cloud.datastore_v1.types.PropertyReference): The property to order by. - direction (~.query.PropertyOrder.Direction): + direction (google.cloud.datastore_v1.types.PropertyOrder.Direction): The direction to order by. Defaults to ``ASCENDING``. """ @@ -200,24 +187,31 @@ class Direction(proto.Enum): DESCENDING = 2 property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - direction = proto.Field(proto.ENUM, number=2, enum=Direction,) class Filter(proto.Message): r"""A holder for any type of filter. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: - composite_filter (~.query.CompositeFilter): + composite_filter (google.cloud.datastore_v1.types.CompositeFilter): A composite filter. - property_filter (~.query.PropertyFilter): + This field is a member of `oneof`_ ``filter_type``. + property_filter (google.cloud.datastore_v1.types.PropertyFilter): A filter on a property. + This field is a member of `oneof`_ ``filter_type``. """ composite_filter = proto.Field( proto.MESSAGE, number=1, oneof="filter_type", message="CompositeFilter", ) - property_filter = proto.Field( proto.MESSAGE, number=2, oneof="filter_type", message="PropertyFilter", ) @@ -228,9 +222,9 @@ class CompositeFilter(proto.Message): operator. Attributes: - op (~.query.CompositeFilter.Operator): + op (google.cloud.datastore_v1.types.CompositeFilter.Operator): The operator for combining multiple filters. - filters (Sequence[~.query.Filter]): + filters (Sequence[google.cloud.datastore_v1.types.Filter]): The list of filters to combine. Must contain at least one filter. """ @@ -241,7 +235,6 @@ class Operator(proto.Enum): AND = 1 op = proto.Field(proto.ENUM, number=1, enum=Operator,) - filters = proto.RepeatedField(proto.MESSAGE, number=2, message="Filter",) @@ -249,11 +242,11 @@ class PropertyFilter(proto.Message): r"""A filter on a specific property. Attributes: - property (~.query.PropertyReference): + property (google.cloud.datastore_v1.types.PropertyReference): The property to filter by. - op (~.query.PropertyFilter.Operator): + op (google.cloud.datastore_v1.types.PropertyFilter.Operator): The operator to filter by. - value (~.gd_entity.Value): + value (google.cloud.datastore_v1.types.Value): The value to compare the property to. """ @@ -268,9 +261,7 @@ class Operator(proto.Enum): HAS_ANCESTOR = 11 property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - op = proto.Field(proto.ENUM, number=2, enum=Operator,) - value = proto.Field(proto.MESSAGE, number=3, message=gd_entity.Value,) @@ -287,14 +278,14 @@ class GqlQuery(proto.Message): and instead must bind all values. For example, ``SELECT * FROM Kind WHERE a = 'string literal'`` is not allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. 
- named_bindings (Sequence[~.query.GqlQuery.NamedBindingsEntry]): + named_bindings (Sequence[google.cloud.datastore_v1.types.GqlQuery.NamedBindingsEntry]): For each non-reserved named binding site in the query string, there must be a named parameter with that name, but not necessarily the inverse. Key must match regex ``[A-Za-z_$][A-Za-z_$0-9]*``, must not match regex ``__.*__``, and must not be ``""``. - positional_bindings (Sequence[~.query.GqlQueryParameter]): + positional_bindings (Sequence[google.cloud.datastore_v1.types.GqlQueryParameter]): Numbered binding site @1 references the first numbered parameter, effectively using 1-based indexing, rather than the usual 0. @@ -304,14 +295,11 @@ class GqlQuery(proto.Message): true. """ - query_string = proto.Field(proto.STRING, number=1) - - allow_literals = proto.Field(proto.BOOL, number=2) - + query_string = proto.Field(proto.STRING, number=1,) + allow_literals = proto.Field(proto.BOOL, number=2,) named_bindings = proto.MapField( proto.STRING, proto.MESSAGE, number=5, message="GqlQueryParameter", ) - positional_bindings = proto.RepeatedField( proto.MESSAGE, number=4, message="GqlQueryParameter", ) @@ -320,19 +308,27 @@ class GqlQuery(proto.Message): class GqlQueryParameter(proto.Message): r"""A binding parameter for a GQL query. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: - value (~.gd_entity.Value): + value (google.cloud.datastore_v1.types.Value): A value parameter. + This field is a member of `oneof`_ ``parameter_type``. cursor (bytes): A query cursor. Query cursors are returned in query result batches. + This field is a member of `oneof`_ ``parameter_type``. """ value = proto.Field( proto.MESSAGE, number=2, oneof="parameter_type", message=gd_entity.Value, ) - - cursor = proto.Field(proto.BYTES, number=3, oneof="parameter_type") + cursor = proto.Field(proto.BYTES, number=3, oneof="parameter_type",) class QueryResultBatch(proto.Message): @@ -345,14 +341,14 @@ class QueryResultBatch(proto.Message): skipped_cursor (bytes): A cursor that points to the position after the last skipped result. Will be set when ``skipped_results`` != 0. - entity_result_type (~.query.EntityResult.ResultType): + entity_result_type (google.cloud.datastore_v1.types.EntityResult.ResultType): The result type for every entity in ``entity_results``. - entity_results (Sequence[~.query.EntityResult]): + entity_results (Sequence[google.cloud.datastore_v1.types.EntityResult]): The results for this batch. end_cursor (bytes): A cursor that points to the position after the last result in the batch. - more_results (~.query.QueryResultBatch.MoreResultsType): + more_results (google.cloud.datastore_v1.types.QueryResultBatch.MoreResultsType): The state of the query after the current batch. 
        snapshot_version (int):
@@ -377,23 +373,17 @@ class MoreResultsType(proto.Enum):
         MORE_RESULTS_AFTER_CURSOR = 4
         NO_MORE_RESULTS = 3

-    skipped_results = proto.Field(proto.INT32, number=6)
-
-    skipped_cursor = proto.Field(proto.BYTES, number=3)
-
+    skipped_results = proto.Field(proto.INT32, number=6,)
+    skipped_cursor = proto.Field(proto.BYTES, number=3,)
     entity_result_type = proto.Field(
         proto.ENUM, number=1, enum="EntityResult.ResultType",
     )
-
     entity_results = proto.RepeatedField(
         proto.MESSAGE, number=2, message="EntityResult",
     )
-
-    end_cursor = proto.Field(proto.BYTES, number=4)
-
+    end_cursor = proto.Field(proto.BYTES, number=4,)
     more_results = proto.Field(proto.ENUM, number=5, enum=MoreResultsType,)
-
-    snapshot_version = proto.Field(proto.INT64, number=7)
+    snapshot_version = proto.Field(proto.INT64, number=7,)

 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py
index 0ad059b78c85..e5b43cca8ffe 100644
--- a/packages/google-cloud-datastore/owlbot.py
+++ b/packages/google-cloud-datastore/owlbot.py
@@ -13,41 +13,80 @@
 # limitations under the License.

 """This script is used to synthesize generated parts of this library."""
+from pathlib import Path
+from typing import List, Optional
+
 import synthtool as s
 from synthtool import gcp
 from synthtool.languages import python

 common = gcp.CommonTemplates()

+# This is a customized version of the s.get_staging_dirs() function from synthtool to
+# cater for copying 2 different folders from googleapis-gen
+# which are datastore and datastore/admin
+# Source https://github.com/googleapis/synthtool/blob/master/synthtool/transforms.py#L280
+def get_staging_dirs(
+    default_version: Optional[str] = None, sub_directory: Optional[str] = None
+) -> List[Path]:
+    """Returns the list of directories, one per version, copied from
+    https://github.com/googleapis/googleapis-gen. Will return in lexical sorting
+    order with the exception of the default_version which will be last (if specified).
+    Args:
+        default_version (str): the default version of the API. The directory for this version
+            will be the last item in the returned list if specified.
+        sub_directory (str): if a `sub_directory` is provided, only the directories within the
+            specified `sub_directory` will be returned.
+    Returns: the empty list if no files were copied.
+    """
+
+    staging = Path("owl-bot-staging")
+
+    if sub_directory:
+        staging /= sub_directory
+
+    if staging.is_dir():
+        # Collect the subdirectories of the staging directory.
+        versions = [v.name for v in staging.iterdir() if v.is_dir()]
+        # Reorder the versions so the default version always comes last.
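+        # e.g. staging dirs "v1beta1" and "v1" with default_version="v1"
+        # yield ["v1beta1", "v1"]; with default_version=None the result is
+        # simply the lexically sorted list.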
+        versions = [v for v in versions if v != default_version]
+        versions.sort()
+        if default_version is not None:
+            versions += [default_version]
+        dirs = [staging / v for v in versions]
+        for dir in dirs:
+            s._tracked_paths.add(dir)
+        return dirs
+    else:
+        return []
+
 # This library ships clients for two different APIs,
 # Datastore and Datastore Admin
 datastore_default_version = "v1"
 datastore_admin_default_version = "v1"

-for library in s.get_staging_dirs(datastore_default_version):
-    if library.parent.absolute() == "datastore":
-        s.move(library / f"google/cloud/datastore_{library.name}")
-        s.move(library / "tests/")
-        s.move(library / "scripts")
-
-for library in s.get_staging_dirs(datastore_admin_default_version):
-    if library.parent.absolute() == "datastore_admin":
-        s.replace(
-            library / "google/**/datastore_admin_client.py",
-            "google-cloud-datastore-admin",
-            "google-cloud-datstore",
-        )
-
-        # Remove spurious markup
-        s.replace(
-            "google/**/datastore_admin/client.py",
-            r"\s+---------------------------------(-)+",
-            "",
-        )
-
-        s.move(library / f"google/cloud/datastore_admin_{library.name}")
-        s.move(library / "tests")
-        s.move(library / "scripts")
+for library in get_staging_dirs(datastore_default_version, "datastore"):
+    s.move(library / f"google/cloud/datastore_{library.name}")
+    s.move(library / "tests/")
+    s.move(library / "scripts")
+
+for library in get_staging_dirs(datastore_admin_default_version, "datastore_admin"):
+    s.replace(
+        library / "google/**/datastore_admin_client.py",
+        "google-cloud-datastore-admin",
+        "google-cloud-datastore",
+    )
+
+    # Remove spurious markup
+    s.replace(
+        library / "google/**/datastore_admin/client.py",
+        r"\s+---------------------------------(-)+",
+        "",
+    )
+
+    s.move(library / f"google/cloud/datastore_admin_{library.name}")
+    s.move(library / "tests")
+    s.move(library / "scripts")

 s.remove_staging_dirs()
diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py
index fae3ea91605f..12e217dea24c 100644
--- a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py
+++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py
@@ -1,6 +1,5 @@
 #! /usr/bin/env python3
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
# - import argparse import os import libcst as cst @@ -41,11 +39,12 @@ def partition( class datastore_adminCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'export_entities': ('project_id', 'output_url_prefix', 'labels', 'entity_filter', ), - 'get_index': ('project_id', 'index_id', ), - 'import_entities': ('project_id', 'input_url', 'labels', 'entity_filter', ), - 'list_indexes': ('project_id', 'filter', 'page_size', 'page_token', ), - + 'create_index': ('project_id', 'index', ), + 'delete_index': ('project_id', 'index_id', ), + 'export_entities': ('project_id', 'output_url_prefix', 'labels', 'entity_filter', ), + 'get_index': ('project_id', 'index_id', ), + 'import_entities': ('project_id', 'input_url', 'labels', 'entity_filter', ), + 'list_indexes': ('project_id', 'filter', 'page_size', 'page_token', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -64,7 +63,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) @@ -76,7 +75,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 8b04f6fe13ba..e0358795f751 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,14 +39,13 @@ def partition( class datastoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'allocate_ids': ('project_id', 'keys', ), - 'begin_transaction': ('project_id', 'transaction_options', ), - 'commit': ('project_id', 'mode', 'transaction', 'mutations', ), - 'lookup': ('project_id', 'keys', 'read_options', ), - 'reserve_ids': ('project_id', 'keys', 'database_id', ), - 'rollback': ('project_id', 'transaction', ), - 'run_query': ('project_id', 'partition_id', 'read_options', 'query', 'gql_query', ), - + 'allocate_ids': ('project_id', 'keys', ), + 'begin_transaction': ('project_id', 'transaction_options', ), + 'commit': ('project_id', 'mode', 'transaction', 'mutations', ), + 'lookup': ('project_id', 'keys', 'read_options', ), + 'reserve_ids': ('project_id', 'keys', 'database_id', ), + 'rollback': ('project_id', 'transaction', ), + 'run_query': ('project_id', 'partition_id', 'read_options', 'query', 'gql_query', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -67,7 +64,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) @@ -79,7 +76,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 6550cea3eaf9..286653d5d3fa 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -32,7 +32,7 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.22.2, <3.0.0dev", + "google-api-core[grpc] >= 1.28.0, <3.0.0dev", # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 diff --git a/packages/google-cloud-datastore/testing/constraints-3.6.txt b/packages/google-cloud-datastore/testing/constraints-3.6.txt index 01fc45a4c0f7..1800ac45ef0c 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.6.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.6.txt @@ -5,8 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.2 +google-api-core==1.28.0 google-cloud-core==1.4.0 proto-plus==1.4.0 libcst==0.2.5 -google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is required through google-api-core \ No newline at end of file diff --git a/packages/google-cloud-datastore/tests/__init__.py b/packages/google-cloud-datastore/tests/__init__.py index e69de29bb2d1..4de65971c238 100644 --- a/packages/google-cloud-datastore/tests/__init__.py +++ b/packages/google-cloud-datastore/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-datastore/tests/unit/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py index df379f1e9d88..4de65971c238 100644 --- a/packages/google-cloud-datastore/tests/unit/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/__init__.py @@ -1,4 +1,5 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +12,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py index 8b137891791f..4de65971c238 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1 +1,15 @@ - +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 54320c9774f1..a8f4a7b64a61 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock @@ -24,16 +22,17 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.api_core import path_template +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datastore_admin_v1.services.datastore_admin import ( DatastoreAdminAsyncClient, @@ -47,6 +46,7 @@ from google.cloud.datastore_admin_v1.types import index from google.longrunning import operations_pb2 from google.oauth2 import service_account +import google.auth def client_cert_source_callback(): @@ -94,26 +94,73 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient] + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] +) +def test_datastore_admin_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datastore.googleapis.com:443" + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DatastoreAdminGrpcTransport, "grpc"), + (transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_datastore_admin_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] ) def test_datastore_admin_client_from_service_account_file(client_class): - creds = 
credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "datastore.googleapis.com:443" def test_datastore_admin_client_get_transport_class(): transport = DatastoreAdminClient.get_transport_class() - assert transport == transports.DatastoreAdminGrpcTransport + available_transports = [ + transports.DatastoreAdminGrpcTransport, + ] + assert transport in available_transports transport = DatastoreAdminClient.get_transport_class("grpc") assert transport == transports.DatastoreAdminGrpcTransport @@ -145,7 +192,7 @@ def test_datastore_admin_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DatastoreAdminClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -158,15 +205,16 @@ def test_datastore_admin_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -174,15 +222,16 @@ def test_datastore_admin_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -190,15 +239,16 @@ def test_datastore_admin_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -218,15 +268,16 
@@ def test_datastore_admin_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -275,29 +326,26 @@ def test_datastore_admin_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -306,66 +354,55 @@ def test_datastore_admin_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) + # Check the case client_cert_source and ADC client cert are not provided. 
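+    # (has_default_client_cert_source is mocked to return False below, so the
+    # client should fall back to the default endpoint and pass
+    # client_cert_source_for_mtls=None regardless of the env var.)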
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -385,15 +422,16 @@ def test_datastore_admin_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,15 +453,16 @@ def test_datastore_admin_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -440,9 +479,10 @@ def test_datastore_admin_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -450,7 +490,7 @@ def test_export_entities( transport: str = "grpc", request_type=datastore_admin.ExportEntitiesRequest ): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -461,13 +501,11 @@ def test_export_entities( with mock.patch.object(type(client.transport.export_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.export_entities(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() # Establish that the response is the type that we expect. @@ -478,12 +516,27 @@ def test_export_entities_from_dict(): test_export_entities(request_type=dict) +def test_export_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + client.export_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ExportEntitiesRequest() + + @pytest.mark.asyncio async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -496,13 +549,11 @@ async def test_export_entities_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.export_entities(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() # Establish that the response is the type that we expect. @@ -515,13 +566,12 @@ async def test_export_entities_async_from_dict(): def test_export_entities_flattened(): - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.export_entities( @@ -535,20 +585,16 @@ def test_export_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entity_filter == datastore_admin.EntityFilter( kinds=["kinds_value"] ) - assert args[0].output_url_prefix == "output_url_prefix_value" def test_export_entities_flattened_error(): - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -564,7 +610,9 @@ def test_export_entities_flattened_error(): @pytest.mark.asyncio async def test_export_entities_flattened_async(): - client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: @@ -587,21 +635,19 @@ async def test_export_entities_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entity_filter == datastore_admin.EntityFilter( kinds=["kinds_value"] ) - assert args[0].output_url_prefix == "output_url_prefix_value" @pytest.mark.asyncio async def test_export_entities_flattened_error_async(): - client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -619,7 +665,7 @@ def test_import_entities( transport: str = "grpc", request_type=datastore_admin.ImportEntitiesRequest ): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -630,13 +676,11 @@ def test_import_entities( with mock.patch.object(type(client.transport.import_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.import_entities(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() # Establish that the response is the type that we expect. @@ -647,12 +691,27 @@ def test_import_entities_from_dict(): test_import_entities(request_type=dict) +def test_import_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + client.import_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ImportEntitiesRequest() + + @pytest.mark.asyncio async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -665,13 +724,11 @@ async def test_import_entities_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.import_entities(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() # Establish that the response is the type that we expect. @@ -684,13 +741,12 @@ async def test_import_entities_async_from_dict(): def test_import_entities_flattened(): - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.import_entities), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.import_entities( @@ -704,20 +760,16 @@ def test_import_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].input_url == "input_url_value" - assert args[0].entity_filter == datastore_admin.EntityFilter( kinds=["kinds_value"] ) def test_import_entities_flattened_error(): - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -733,7 +785,9 @@ def test_import_entities_flattened_error(): @pytest.mark.asyncio async def test_import_entities_flattened_async(): - client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: @@ -756,13 +810,9 @@ async def test_import_entities_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].input_url == "input_url_value" - assert args[0].entity_filter == datastore_admin.EntityFilter( kinds=["kinds_value"] ) @@ -770,7 +820,9 @@ async def test_import_entities_flattened_async(): @pytest.mark.asyncio async def test_import_entities_flattened_error_async(): - client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -784,11 +836,169 @@ async def test_import_entities_flattened_error_async(): ) +def test_create_index( + transport: str = "grpc", request_type=datastore_admin.CreateIndexRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_index_from_dict(): + test_create_index(request_type=dict) + + +def test_create_index_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + client.create_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest() + + +@pytest.mark.asyncio +async def test_create_index_async( + transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest +): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_index_async_from_dict(): + await test_create_index_async(request_type=dict) + + +def test_delete_index( + transport: str = "grpc", request_type=datastore_admin.DeleteIndexRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_index_from_dict(): + test_delete_index(request_type=dict) + + +def test_delete_index_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + client.delete_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest() + + +@pytest.mark.asyncio +async def test_delete_index_async( + transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest +): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_index_async_from_dict(): + await test_delete_index_async(request_type=dict) + + def test_get_index( transport: str = "grpc", request_type=datastore_admin.GetIndexRequest ): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -805,27 +1015,19 @@ def test_get_index( ancestor=index.Index.AncestorMode.NONE, state=index.Index.State.CREATING, ) - response = client.get_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.project_id == "project_id_value" - assert response.index_id == "index_id_value" - assert response.kind == "kind_value" - assert response.ancestor == index.Index.AncestorMode.NONE - assert response.state == index.Index.State.CREATING @@ -833,12 +1035,27 @@ def test_get_index_from_dict(): test_get_index(request_type=dict) +def test_get_index_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_index), "__call__") as call: + client.get_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.GetIndexRequest() + + @pytest.mark.asyncio async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -857,26 +1074,19 @@ async def test_get_index_async( state=index.Index.State.CREATING, ) ) - response = await client.get_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() # Establish that the response is the type that we expect. assert isinstance(response, index.Index) - assert response.project_id == "project_id_value" - assert response.index_id == "index_id_value" - assert response.kind == "kind_value" - assert response.ancestor == index.Index.AncestorMode.NONE - assert response.state == index.Index.State.CREATING @@ -889,7 +1099,7 @@ def test_list_indexes( transport: str = "grpc", request_type=datastore_admin.ListIndexesRequest ): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -902,19 +1112,15 @@ def test_list_indexes( call.return_value = datastore_admin.ListIndexesResponse( next_page_token="next_page_token_value", ) - response = client.list_indexes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == "next_page_token_value" @@ -922,12 +1128,27 @@ def test_list_indexes_from_dict(): test_list_indexes(request_type=dict) +def test_list_indexes_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + client.list_indexes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ListIndexesRequest() + + @pytest.mark.asyncio async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest ): client = DatastoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -942,18 +1163,15 @@ async def test_list_indexes_async( next_page_token="next_page_token_value", ) ) - response = await client.list_indexes(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -963,7 +1181,7 @@ async def test_list_indexes_async_from_dict(): def test_list_indexes_pager(): - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials,) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -994,7 +1212,7 @@ def test_list_indexes_pager(): def test_list_indexes_pages(): - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials,) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -1020,7 +1238,7 @@ def test_list_indexes_pages(): @pytest.mark.asyncio async def test_list_indexes_async_pager(): - client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1053,7 +1271,7 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): - client = DatastoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1084,16 +1302,16 @@ async def test_list_indexes_async_pages(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DatastoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.DatastoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DatastoreAdminClient( @@ -1103,7 +1321,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.DatastoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DatastoreAdminClient( @@ -1114,7 +1332,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. 
transport = transports.DatastoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = DatastoreAdminClient(transport=transport) assert client.transport is transport @@ -1123,13 +1341,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.DatastoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DatastoreAdminGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1144,23 +1362,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatastoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DatastoreAdminGrpcTransport,) def test_datastore_admin_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DatastoreAdminTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1172,7 +1390,7 @@ def test_datastore_admin_base_transport(): ) as Transport: Transport.return_value = None transport = transports.DatastoreAdminTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1180,6 +1398,8 @@ def test_datastore_admin_base_transport(): methods = ( "export_entities", "import_entities", + "create_index", + "delete_index", "get_index", "list_indexes", ) @@ -1187,6 +1407,9 @@ def test_datastore_admin_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): @@ -1196,18 +1419,19 @@ def test_datastore_admin_base_transport(): def test_datastore_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreAdminTransport( 
credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", - scopes=( + scopes=None, + default_scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), @@ -1217,22 +1441,23 @@ def test_datastore_admin_base_transport_with_credentials_file(): def test_datastore_admin_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreAdminTransport() adc.assert_called_once() def test_datastore_admin_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) DatastoreAdminClient() adc.assert_called_once_with( - scopes=( + scopes=None, + default_scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), @@ -1240,16 +1465,22 @@ def test_datastore_admin_auth_adc(): ) -def test_datastore_admin_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.DatastoreAdminGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" - ) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( - scopes=( + scopes=["1", "2"], + default_scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), @@ -1257,9 +1488,92 @@ def test_datastore_admin_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DatastoreAdminGrpcTransport, grpc_helpers), + (transports.DatastoreAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_datastore_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=["1", "2"], + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_datastore_admin_host_no_port(): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), @@ -1269,7 +1583,7 @@ def test_datastore_admin_host_no_port(): def test_datastore_admin_host_with_port(): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), @@ -1278,7 +1592,7 @@ def test_datastore_admin_host_with_port(): def test_datastore_admin_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DatastoreAdminGrpcTransport( @@ -1290,7 +1604,7 @@ def test_datastore_admin_grpc_transport_channel(): def test_datastore_admin_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.DatastoreAdminGrpcAsyncIOTransport( @@ -1301,6 +1615,8 @@ def test_datastore_admin_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1315,7 +1631,7 @@ def test_datastore_admin_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1323,9 +1639,9 @@ def test_datastore_admin_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -1341,17 +1657,20 @@ def test_datastore_admin_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -1367,7 +1686,7 @@ def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel @@ -1385,19 +1704,20 @@ def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel def test_datastore_admin_grpc_lro_client(): client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -1410,7 +1730,7 @@ def test_datastore_admin_grpc_lro_client(): def test_datastore_admin_grpc_lro_async_client(): client = DatastoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -1423,7 +1743,6 @@ def test_datastore_admin_grpc_lro_async_client(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1444,7 +1763,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = DatastoreAdminClient.common_folder_path(folder) assert expected == actual @@ -1463,7 +1781,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = DatastoreAdminClient.common_organization_path(organization) assert expected == actual @@ -1482,7 +1799,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = DatastoreAdminClient.common_project_path(project) assert expected == actual @@ -1502,7 +1818,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -1529,7 +1844,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DatastoreAdminTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreAdminClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1538,6 +1853,52 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DatastoreAdminClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + 
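The close tests that follow exercise the context-manager protocol this patch adds to the clients: entering the client yields it unchanged, and exiting closes the underlying transport channel exactly once. A minimal standalone sketch of that pattern (illustrative only, not part of the patch; it assumes DatastoreAdminClient is re-exported at the google.cloud.datastore_admin_v1 top level):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient

    client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials())

    # Patch the transport-level close() so no real channel teardown happens.
    with mock.patch.object(type(client.transport), "close") as close:
        with client:
            close.assert_not_called()  # the channel stays open inside the block
        close.assert_called_once()     # leaving the block closes the transport
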
+@pytest.mark.asyncio +async def test_transport_close_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py index 8b137891791f..4de65971c238 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py @@ -1 +1,15 @@ - +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 32faab361137..04ced96f401d 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock @@ -24,13 +22,14 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.api_core import path_template +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient from google.cloud.datastore_v1.services.datastore import DatastoreClient @@ -39,10 +38,11 @@ from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query from google.oauth2 import service_account -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.type import latlng_pb2 as latlng # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore +import google.auth def client_cert_source_callback(): @@ -84,25 +84,70 @@ def test__get_default_mtls_endpoint(): assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient]) +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) +def test_datastore_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datastore.googleapis.com:443" + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DatastoreGrpcTransport, "grpc"), + (transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_datastore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) def test_datastore_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = 
client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "datastore.googleapis.com:443" def test_datastore_client_get_transport_class(): transport = DatastoreClient.get_transport_class() - assert transport == transports.DatastoreGrpcTransport + available_transports = [ + transports.DatastoreGrpcTransport, + ] + assert transport in available_transports transport = DatastoreClient.get_transport_class("grpc") assert transport == transports.DatastoreGrpcTransport @@ -130,7 +175,7 @@ def test_datastore_client_get_transport_class(): def test_datastore_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(DatastoreClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -143,15 +188,16 @@ def test_datastore_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -159,15 +205,16 @@ def test_datastore_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -175,15 +222,16 @@ def test_datastore_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -203,15 +251,16 @@ def test_datastore_client_client_options(client_class, 
transport_class, transpor options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -258,29 +307,26 @@ def test_datastore_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -289,66 +335,55 @@ def test_datastore_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -368,15 +403,16 @@ def test_datastore_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -398,15 +434,16 @@ def test_datastore_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -421,15 +458,16 @@ def test_datastore_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -440,17 +478,14 @@ def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.LookupResponse() - response = client.lookup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.LookupResponse) @@ -458,12 +493,27 @@ def test_lookup_from_dict(): test_lookup(request_type=dict) +def test_lookup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + client.lookup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.LookupRequest() + + @pytest.mark.asyncio async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -476,13 +526,11 @@ async def test_lookup_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.LookupResponse() ) - response = await client.lookup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() # Establish that the response is the type that we expect. @@ -495,13 +543,12 @@ async def test_lookup_async_from_dict(): def test_lookup_flattened(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.LookupResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.lookup( @@ -520,20 +567,17 @@ def test_lookup_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].read_options == datastore.ReadOptions( read_consistency=datastore.ReadOptions.ReadConsistency.STRONG ) - assert args[0].keys == [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] def test_lookup_flattened_error(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -554,7 +598,7 @@ def test_lookup_flattened_error(): @pytest.mark.asyncio async def test_lookup_flattened_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -582,13 +626,10 @@ async def test_lookup_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].read_options == datastore.ReadOptions( read_consistency=datastore.ReadOptions.ReadConsistency.STRONG ) - assert args[0].keys == [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] @@ -596,7 +637,7 @@ async def test_lookup_flattened_async(): @pytest.mark.asyncio async def test_lookup_flattened_error_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -617,7 +658,7 @@ async def test_lookup_flattened_error_async(): def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryRequest): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -628,17 +669,14 @@ def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryReque with mock.patch.object(type(client.transport.run_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RunQueryResponse() - response = client.run_query(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RunQueryResponse) @@ -646,12 +684,27 @@ def test_run_query_from_dict(): test_run_query(request_type=dict) +def test_run_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunQueryRequest() + + @pytest.mark.asyncio async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -664,13 +717,11 @@ async def test_run_query_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.RunQueryResponse() ) - response = await client.run_query(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() # Establish that the response is the type that we expect. 
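The rename running through these hunks (credentials -> ga_credentials, auth -> google.auth, exceptions -> core_exceptions) presumes a matching change to the test module's import block, which falls outside this excerpt. A sketch of what the updated aliases are presumed to look like, with the aliasing apparently chosen so that local variables named `credentials` cannot shadow the auth module:

    import google.auth                                          # previously: from google import auth
    from google.api_core import exceptions as core_exceptions   # previously: from google.api_core import exceptions
    from google.auth import credentials as ga_credentials       # previously: from google.auth import credentials
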
@@ -686,7 +737,7 @@ def test_begin_transaction( transport: str = "grpc", request_type=datastore.BeginTransactionRequest ): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -701,19 +752,15 @@ def test_begin_transaction( call.return_value = datastore.BeginTransactionResponse( transaction=b"transaction_blob", ) - response = client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" @@ -721,12 +768,29 @@ def test_begin_transaction_from_dict(): test_begin_transaction(request_type=dict) +def test_begin_transaction_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.BeginTransactionRequest() + + @pytest.mark.asyncio async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -741,18 +805,15 @@ async def test_begin_transaction_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.BeginTransactionResponse(transaction=b"transaction_blob",) ) - response = await client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" @@ -762,7 +823,7 @@ async def test_begin_transaction_async_from_dict(): def test_begin_transaction_flattened(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -770,7 +831,6 @@ def test_begin_transaction_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datastore.BeginTransactionResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.begin_transaction(project_id="project_id_value",) @@ -779,12 +839,11 @@ def test_begin_transaction_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" def test_begin_transaction_flattened_error(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -796,7 +855,7 @@ def test_begin_transaction_flattened_error(): @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -816,13 +875,12 @@ async def test_begin_transaction_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -834,7 +892,7 @@ async def test_begin_transaction_flattened_error_async(): def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -845,19 +903,15 @@ def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.CommitResponse(index_updates=1389,) - response = client.commit(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.CommitResponse) - assert response.index_updates == 1389 @@ -865,12 +919,27 @@ def test_commit_from_dict(): test_commit(request_type=dict) +def test_commit_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.CommitRequest() + + @pytest.mark.asyncio async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -883,18 +952,15 @@ async def test_commit_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.CommitResponse(index_updates=1389,) ) - response = await client.commit(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() # Establish that the response is the type that we expect. assert isinstance(response, datastore.CommitResponse) - assert response.index_updates == 1389 @@ -904,13 +970,12 @@ async def test_commit_async_from_dict(): def test_commit_flattened(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.CommitResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.commit( @@ -934,11 +999,8 @@ def test_commit_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL - assert args[0].mutations == [ datastore.Mutation( insert=entity.Entity( @@ -948,12 +1010,11 @@ def test_commit_flattened(): ) ) ] - assert args[0].transaction == b"transaction_blob" def test_commit_flattened_error(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -979,7 +1040,7 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1012,11 +1073,8 @@ async def test_commit_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL - assert args[0].mutations == [ datastore.Mutation( insert=entity.Entity( @@ -1026,13 +1084,12 @@ async def test_commit_flattened_async(): ) ) ] - assert args[0].transaction == b"transaction_blob" @pytest.mark.asyncio async def test_commit_flattened_error_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1058,7 +1115,7 @@ async def test_commit_flattened_error_async(): def test_rollback(transport: str = "grpc", request_type=datastore.RollbackRequest): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1069,17 +1126,14 @@ def test_rollback(transport: str = "grpc", request_type=datastore.RollbackReques with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RollbackResponse() - response = client.rollback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RollbackResponse) @@ -1087,12 +1141,27 @@ def test_rollback_from_dict(): test_rollback(request_type=dict) +def test_rollback_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RollbackRequest() + + @pytest.mark.asyncio async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1105,13 +1174,11 @@ async def test_rollback_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.RollbackResponse() ) - response = await client.rollback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() # Establish that the response is the type that we expect. @@ -1124,13 +1191,12 @@ async def test_rollback_async_from_dict(): def test_rollback_flattened(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.RollbackResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rollback( @@ -1141,14 +1207,12 @@ def test_rollback_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].transaction == b"transaction_blob" def test_rollback_flattened_error(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1162,7 +1226,7 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1182,15 +1246,13 @@ async def test_rollback_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].transaction == b"transaction_blob" @pytest.mark.asyncio async def test_rollback_flattened_error_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1206,7 +1268,7 @@ def test_allocate_ids( transport: str = "grpc", request_type=datastore.AllocateIdsRequest ): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1217,17 +1279,14 @@ def test_allocate_ids( with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.AllocateIdsResponse() - response = client.allocate_ids(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.AllocateIdsResponse) @@ -1235,12 +1294,27 @@ def test_allocate_ids_from_dict(): test_allocate_ids(request_type=dict) +def test_allocate_ids_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + client.allocate_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.AllocateIdsRequest() + + @pytest.mark.asyncio async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1253,13 +1327,11 @@ async def test_allocate_ids_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.AllocateIdsResponse() ) - response = await client.allocate_ids(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() # Establish that the response is the type that we expect. @@ -1272,13 +1344,12 @@ async def test_allocate_ids_async_from_dict(): def test_allocate_ids_flattened(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.AllocateIdsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.allocate_ids( @@ -1294,16 +1365,14 @@ def test_allocate_ids_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] def test_allocate_ids_flattened_error(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1321,7 +1390,7 @@ def test_allocate_ids_flattened_error(): @pytest.mark.asyncio async def test_allocate_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1346,9 +1415,7 @@ async def test_allocate_ids_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] @@ -1356,7 +1423,7 @@ async def test_allocate_ids_flattened_async(): @pytest.mark.asyncio async def test_allocate_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
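The *_flattened and *_flattened_error tests above all exercise the same GAPIC calling convention: every RPC method accepts either a fully formed request object or individual flattened keyword fields, and passing both at once is rejected client-side before any network call is made. A minimal sketch of that behavior, assuming the generated package exports the same names used in the diff (the project id is a placeholder):

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud.datastore_v1 import DatastoreClient
    from google.cloud.datastore_v1.types import datastore, entity

    client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials())
    key = entity.Key(partition_id=entity.PartitionId(project_id="my-project"))

    # Supplying a request object and a flattened field together fails fast
    # with ValueError, before any RPC is attempted; this is exactly what the
    # *_flattened_error tests assert.
    with pytest.raises(ValueError):
        client.allocate_ids(
            request=datastore.AllocateIdsRequest(project_id="my-project"),
            project_id="my-project",
            keys=[key],
        )
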
@@ -1374,7 +1441,7 @@ async def test_allocate_ids_flattened_error_async(): def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsRequest): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1385,17 +1452,14 @@ def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsR with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.ReserveIdsResponse() - response = client.reserve_ids(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datastore.ReserveIdsResponse) @@ -1403,12 +1467,27 @@ def test_reserve_ids_from_dict(): test_reserve_ids(request_type=dict) +def test_reserve_ids_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + client.reserve_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.ReserveIdsRequest() + + @pytest.mark.asyncio async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest ): client = DatastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1421,13 +1500,11 @@ async def test_reserve_ids_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datastore.ReserveIdsResponse() ) - response = await client.reserve_ids(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() # Establish that the response is the type that we expect. @@ -1440,13 +1517,12 @@ async def test_reserve_ids_async_from_dict(): def test_reserve_ids_flattened(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datastore.ReserveIdsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.reserve_ids( @@ -1462,16 +1538,14 @@ def test_reserve_ids_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] def test_reserve_ids_flattened_error(): - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1489,7 +1563,7 @@ def test_reserve_ids_flattened_error(): @pytest.mark.asyncio async def test_reserve_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1514,9 +1588,7 @@ async def test_reserve_ids_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] @@ -1524,7 +1596,7 @@ async def test_reserve_ids_flattened_async(): @pytest.mark.asyncio async def test_reserve_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1543,16 +1615,16 @@ async def test_reserve_ids_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DatastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.DatastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DatastoreClient( @@ -1562,7 +1634,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.DatastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DatastoreClient( @@ -1573,7 +1645,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.DatastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = DatastoreClient(transport=transport) assert client.transport is transport @@ -1582,13 +1654,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.DatastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DatastoreGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1596,27 +1668,27 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], + [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DatastoreGrpcTransport,) def test_datastore_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DatastoreTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1628,7 +1700,7 @@ def test_datastore_base_transport(): ) as Transport: Transport.return_value = None transport = transports.DatastoreTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1646,22 +1718,26 @@ def test_datastore_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + def test_datastore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", - scopes=( + scopes=None, + default_scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), @@ -1671,22 +1747,23 @@ def test_datastore_base_transport_with_credentials_file(): def test_datastore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreTransport() adc.assert_called_once() def test_datastore_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) DatastoreClient() adc.assert_called_once_with( - scopes=( + scopes=None, + default_scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), @@ -1694,16 +1771,19 @@ def test_datastore_auth_adc(): ) -def test_datastore_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], +) +def test_datastore_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.DatastoreGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" - ) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( - scopes=( + scopes=["1", "2"], + default_scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), @@ -1711,9 +1791,89 @@ def test_datastore_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DatastoreGrpcTransport, grpc_helpers), + (transports.DatastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_datastore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=["1", "2"], + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], +) +def test_datastore_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_datastore_host_no_port(): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), @@ -1723,7 +1883,7 @@ def test_datastore_host_no_port(): def test_datastore_host_with_port(): client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), @@ -1732,7 +1892,7 @@ def test_datastore_host_with_port(): def test_datastore_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DatastoreGrpcTransport( @@ -1744,7 +1904,7 @@ def test_datastore_grpc_transport_channel(): def test_datastore_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DatastoreGrpcAsyncIOTransport( @@ -1755,6 +1915,8 @@ def test_datastore_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], @@ -1764,7 +1926,7 @@ def test_datastore_transport_channel_mtls_with_client_cert_source(transport_clas "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1772,9 +1934,9 @@ def test_datastore_transport_channel_mtls_with_client_cert_source(transport_clas mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -1790,17 +1952,20 @@ def test_datastore_transport_channel_mtls_with_client_cert_source(transport_clas "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport], @@ -1813,7 +1978,7 @@ def test_datastore_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel @@ -1831,19 +1996,19 @@ def test_datastore_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1864,7 +2029,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = DatastoreClient.common_folder_path(folder) assert expected == actual @@ -1883,7 +2047,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = DatastoreClient.common_organization_path(organization) assert expected == actual @@ -1902,7 +2065,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project 
= "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = DatastoreClient.common_project_path(project) assert expected == actual @@ -1922,7 +2084,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -1949,7 +2110,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DatastoreTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1958,6 +2119,52 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DatastoreClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() From bba94777334c28ffddd37007593a1ed59249b1f7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 11:05:21 -0500 Subject: [PATCH 380/611] chore: release 2.4.0 (#250) --- packages/google-cloud-datastore/CHANGELOG.md | 21 +++++++++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index b24a7b8a6deb..089d3abcd80f 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.4.0](https://www.github.com/googleapis/python-datastore/compare/v2.3.0...v2.4.0) (2021-11-08) + + +### Features + +* add context manager support in client ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) +* add methods for creating and deleting composite indexes ([#248](https://www.github.com/googleapis/python-datastore/issues/248)) ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) +* add support for self-signed JWT flow for service accounts ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) +* export async client from 'google/cloud/datastore_v1' ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) +* **deps:** require google-api-core >= 1.28.0 ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) + + +### Documentation + +* list 'oneofs' in docstrings for message classes ([d6c8868](https://www.github.com/googleapis/python-datastore/commit/d6c8868088daa99979f03b0ba359f7ad1c842b39)) + ## [2.3.0](https://www.github.com/googleapis/python-datastore/compare/v2.2.0...v2.3.0) (2021-10-18) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 999199f5a138..fe11624d91dd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.3.0" +__version__ = "2.4.0" From 0a7a6714b7c3b9b2edadd9ae03d211ab5ce50305 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 18:07:54 -0500 Subject: [PATCH 381/611] chore: use gapic-generator-python 0.56.2 (#252) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 13 ++- .../services/datastore_admin/client.py | 25 +++-- .../datastore_admin/transports/base.py | 10 +- .../datastore_admin/transports/grpc.py | 6 +- .../transports/grpc_asyncio.py | 6 +- .../services/datastore/async_client.py | 13 ++- .../datastore_v1/services/datastore/client.py | 25 +++-- .../services/datastore/transports/base.py | 8 +- .../services/datastore/transports/grpc.py | 4 +- .../datastore/transports/grpc_asyncio.py | 4 +- .../cloud/datastore_v1/types/datastore.py | 12 ++ .../google/cloud/datastore_v1/types/entity.py | 13 +++ .../google/cloud/datastore_v1/types/query.py | 4 + .../test_datastore_admin.py | 72 ++++++++---- .../unit/gapic/datastore_v1/test_datastore.py | 104 +++++++++++++----- 15 files changed, 224 insertions(+), 95 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index e1d24d1684e9..1d9c8fc92e67 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index b8ca70c4eadd..cedc242816f3 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -326,8 +328,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 8fc750285c33..1b3c3d325c0c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -18,11 +18,11 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py 
b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 07db8479c032..3d08e9db0fcc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -16,9 +16,9 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 8a1f1a547d2b..3f9e53305144 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -16,9 +16,9 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index ca6beef27784..2246300ec0fd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity diff --git 
a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 4c53cc1fa572..5205b8935562 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity @@ -273,8 +275,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
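Two of the changes in this hunk recur across every client touched by the patch. The `try`/`except AttributeError` around `OptionalRetry` above is an import-time feature probe: evaluating the `Union[...]` touches `gapic_v1.method._MethodDefault`, which only exists on newer `google-api-core` releases, so older installs fall back to the previous, looser alias. The lines below apply the second pattern, replacing the deprecated `distutils.util.strtobool` with a strict string comparison. A standalone sketch of that check follows; the helper name is hypothetical and not part of the patch:

    import os

    def _use_client_cert_from_env() -> bool:
        # Only the literal strings "true" and "false" are accepted; values
        # strtobool used to coerce (e.g. "1", "yes") now fail loudly.
        value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        if value not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
                "must be either `true` or `false`"
            )
        return value == "true"
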
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 7959b72ed8ef..e23e42a2a59e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index afcc6a15fabf..55c93eb89b96 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 20c51f7c6446..2e71d2977c0b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index a36a7293c64c..f3f87f6ca2a5 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -115,9 +115,11 @@ class RunQueryRequest(proto.Message): The options for this query. query (google.cloud.datastore_v1.types.Query): The query to run. + This field is a member of `oneof`_ ``query_type``. gql_query (google.cloud.datastore_v1.types.GqlQuery): The GQL query to run. + This field is a member of `oneof`_ ``query_type``. """ @@ -221,6 +223,7 @@ class CommitRequest(proto.Message): The identifier of the transaction associated with the commit. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``transaction_selector``. mutations (Sequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. @@ -347,27 +350,32 @@ class Mutation(proto.Message): The entity to insert. The entity must not already exist. The entity key's final path element may be incomplete. + This field is a member of `oneof`_ ``operation``. update (google.cloud.datastore_v1.types.Entity): The entity to update. The entity must already exist. Must have a complete key path. + This field is a member of `oneof`_ ``operation``. upsert (google.cloud.datastore_v1.types.Entity): The entity to upsert. The entity may or may not already exist. The entity key's final path element may be incomplete. + This field is a member of `oneof`_ ``operation``. delete (google.cloud.datastore_v1.types.Key): The key of the entity to delete. The entity may or may not already exist. Must have a complete key path and must not be reserved/read- only. + This field is a member of `oneof`_ ``operation``. base_version (int): The version of the entity that this mutation is being applied to. If this does not match the current version on the server, the mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. """ @@ -429,11 +437,13 @@ class ReadOptions(proto.Message): read_consistency (google.cloud.datastore_v1.types.ReadOptions.ReadConsistency): The non-transactional read consistency to use. Cannot be set to ``STRONG`` for global queries. + This field is a member of `oneof`_ ``consistency_type``. transaction (bytes): The identifier of the transaction in which to read. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. """ @@ -469,9 +479,11 @@ class TransactionOptions(proto.Message): read_write (google.cloud.datastore_v1.types.TransactionOptions.ReadWrite): The transaction should allow both reads and writes. + This field is a member of `oneof`_ ``mode``. read_only (google.cloud.datastore_v1.types.TransactionOptions.ReadOnly): The transaction should only allow reads. + This field is a member of `oneof`_ ``mode``. """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 8ff844f7bda7..828d43e52c9c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -117,11 +117,13 @@ class PathElement(proto.Message): Never equal to zero. Values less than zero are discouraged and may not be supported in the future. + This field is a member of `oneof`_ ``id_type``. 
name (str): The name of the entity. A name matching regex ``__.*__`` is reserved/read-only. A name must not be more than 1500 bytes when UTF-8 encoded. Cannot be ``""``. + This field is a member of `oneof`_ ``id_type``. """ @@ -160,49 +162,60 @@ class Value(proto.Message): Attributes: null_value (google.protobuf.struct_pb2.NullValue): A null value. + This field is a member of `oneof`_ ``value_type``. boolean_value (bool): A boolean value. + This field is a member of `oneof`_ ``value_type``. integer_value (int): An integer value. + This field is a member of `oneof`_ ``value_type``. double_value (float): A double value. + This field is a member of `oneof`_ ``value_type``. timestamp_value (google.protobuf.timestamp_pb2.Timestamp): A timestamp value. When stored in the Datastore, precise only to microseconds; any additional precision is rounded down. + This field is a member of `oneof`_ ``value_type``. key_value (google.cloud.datastore_v1.types.Key): A key value. + This field is a member of `oneof`_ ``value_type``. string_value (str): A UTF-8 encoded string value. When ``exclude_from_indexes`` is false (it is indexed), may have at most 1500 bytes. Otherwise, may be set to at most 1,000,000 bytes. + This field is a member of `oneof`_ ``value_type``. blob_value (bytes): A blob value. May have at most 1,000,000 bytes. When ``exclude_from_indexes`` is false, may have at most 1500 bytes. In JSON requests, must be base64-encoded. + This field is a member of `oneof`_ ``value_type``. geo_point_value (google.type.latlng_pb2.LatLng): A geo point value representing a point on the surface of Earth. + This field is a member of `oneof`_ ``value_type``. entity_value (google.cloud.datastore_v1.types.Entity): An entity value. - May have no key. - May have a key with an incomplete key path. - May have a reserved/read-only key. + This field is a member of `oneof`_ ``value_type``. array_value (google.cloud.datastore_v1.types.ArrayValue): An array value. Cannot contain another array value. A ``Value`` instance that sets field ``array_value`` must not set fields ``meaning`` or ``exclude_from_indexes``. + This field is a member of `oneof`_ ``value_type``. meaning (int): The ``meaning`` field should only be populated for backwards diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 1c69e89fae22..b42e7191ad48 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -203,9 +203,11 @@ class Filter(proto.Message): Attributes: composite_filter (google.cloud.datastore_v1.types.CompositeFilter): A composite filter. + This field is a member of `oneof`_ ``filter_type``. property_filter (google.cloud.datastore_v1.types.PropertyFilter): A filter on a property. + This field is a member of `oneof`_ ``filter_type``. """ @@ -318,10 +320,12 @@ class GqlQueryParameter(proto.Message): Attributes: value (google.cloud.datastore_v1.types.Value): A value parameter. + This field is a member of `oneof`_ ``parameter_type``. cursor (bytes): A query cursor. Query cursors are returned in query result batches. + This field is a member of `oneof`_ ``parameter_type``. 
""" diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index a8f4a7b64a61..69c55257e046 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -585,12 +585,18 @@ def test_export_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) - assert args[0].output_url_prefix == "output_url_prefix_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val + arg = args[0].output_url_prefix + mock_val = "output_url_prefix_value" + assert arg == mock_val def test_export_entities_flattened_error(): @@ -635,12 +641,18 @@ async def test_export_entities_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) - assert args[0].output_url_prefix == "output_url_prefix_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val + arg = args[0].output_url_prefix + mock_val = "output_url_prefix_value" + assert arg == mock_val @pytest.mark.asyncio @@ -760,12 +772,18 @@ def test_import_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].input_url == "input_url_value" - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].input_url + mock_val = "input_url_value" + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val def test_import_entities_flattened_error(): @@ -810,12 +828,18 @@ async def test_import_entities_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].input_url == "input_url_value" - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].input_url + mock_val = "input_url_value" + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 04ced96f401d..b58b5ad5afe9 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -567,13 +567,19 @@ def test_lookup_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].read_options == datastore.ReadOptions( + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].read_options + mock_val = datastore.ReadOptions( read_consistency=datastore.ReadOptions.ReadConsistency.STRONG ) - assert args[0].keys == [ + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val def test_lookup_flattened_error(): @@ -626,13 +632,19 @@ async def test_lookup_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].read_options == datastore.ReadOptions( + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].read_options + mock_val = datastore.ReadOptions( read_consistency=datastore.ReadOptions.ReadConsistency.STRONG ) - assert args[0].keys == [ + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val @pytest.mark.asyncio @@ -839,7 +851,9 @@ def test_begin_transaction_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val def test_begin_transaction_flattened_error(): @@ -875,7 +889,9 @@ async def test_begin_transaction_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -999,9 +1015,14 @@ def test_commit_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL - assert args[0].mutations == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].mode + mock_val = datastore.CommitRequest.Mode.TRANSACTIONAL + assert arg == mock_val + arg = args[0].mutations + mock_val = [ datastore.Mutation( insert=entity.Entity( key=entity.Key( @@ -1010,6 +1031,7 @@ def test_commit_flattened(): ) ) ] + assert arg == mock_val assert args[0].transaction == b"transaction_blob" @@ -1073,9 +1095,14 @@ async def test_commit_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL - assert args[0].mutations == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].mode + mock_val = datastore.CommitRequest.Mode.TRANSACTIONAL + assert arg == mock_val + arg = args[0].mutations + mock_val = [ datastore.Mutation( insert=entity.Entity( key=entity.Key( @@ -1084,6 +1111,7 @@ async def test_commit_flattened_async(): ) ) ] + assert arg == mock_val assert args[0].transaction == b"transaction_blob" @@ -1207,8 +1235,12 @@ def test_rollback_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].transaction == b"transaction_blob" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].transaction + mock_val = b"transaction_blob" + assert arg == mock_val def test_rollback_flattened_error(): @@ -1246,8 +1278,12 @@ async def test_rollback_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].transaction == b"transaction_blob" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].transaction + mock_val = b"transaction_blob" + assert arg == mock_val @pytest.mark.asyncio @@ -1365,10 +1401,14 @@ def test_allocate_ids_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val def test_allocate_ids_flattened_error(): @@ -1415,10 +1455,14 @@ async def test_allocate_ids_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val @pytest.mark.asyncio @@ -1538,10 +1582,14 @@ def test_reserve_ids_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val def test_reserve_ids_flattened_error(): @@ -1588,10 +1636,14 @@ async def test_reserve_ids_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val @pytest.mark.asyncio From 79376fc3e0a3ae011dd23d919d7daa275ae4323a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 12 Nov 2021 12:45:42 -0500 Subject: [PATCH 382/611] chore: add default_version and codeowner_team to .repo-metadata.json (#254) --- .../.repo-metadata.json | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index 5f4bae89f2a8..bfeff1a10f29 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -1,13 +1,15 @@ { - "name": "datastore", - "name_pretty": "Google Cloud Datastore", - "product_documentation": "https://cloud.google.com/datastore", - "client_documentation": "https://googleapis.dev/python/datastore/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", - "release_level": "ga", - "language": "python", - "library_type": "GAPIC_COMBO", - "repo": "googleapis/python-datastore", - "distribution_name": "google-cloud-datastore", - "api_id": "datastore.googleapis.com" -} \ No newline at end of file + "name": "datastore", + "name_pretty": "Google Cloud Datastore", + "product_documentation": "https://cloud.google.com/datastore", + "client_documentation": "https://googleapis.dev/python/datastore/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", + "release_level": "ga", + "language": "python", + "library_type": "GAPIC_COMBO", + "repo": "googleapis/python-datastore", + "distribution_name": "google-cloud-datastore", + "api_id": "datastore.googleapis.com", + "default_version": "v1", + "codeowner_team": "@googleapis/firestore-dpe" +} From ddffbc2cc0f01f034f2291c6aaccba00702004d1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Nov 2021 19:15:54 -0500 Subject: [PATCH 383/611] chore(python): add .github/CODEOWNERS as a templated file (#253) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): add .github/CODEOWNERS as a templated file Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot 
Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.github/CODEOWNERS | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 108063d4dee4..7519fa3a2289 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS index 6da5f48a6fbf..c5d76c65c532 100644 --- a/packages/google-cloud-datastore/.github/CODEOWNERS +++ b/packages/google-cloud-datastore/.github/CODEOWNERS @@ -3,8 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. +# @googleapis/yoshi-python @googleapis/firestore-dpe are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/firestore-dpe -# The firestore-dpe team is the default owner for anything not -# explicitly taken by someone else. -* @googleapis/firestore-dpe @googleapis/yoshi-python +# @googleapis/python-samples-owners @googleapis/firestore-dpe are the default owners for samples changes +/samples/ @googleapis/python-samples-owners @googleapis/firestore-dpe From f679c7bfd34973485535c9755b52849f41e122f8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Sat, 13 Nov 2021 07:46:12 -0500 Subject: [PATCH 384/611] ci: run 'mypy' against 'google/', 'tests/' (#251) Closes #240. 
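Most of this change is nox/owlbot configuration, but widening the mypy run to cover `tests/` also means the test doubles themselves must type-check. The recurring fix, visible in the test hunks below, is annotating empty class-level defaults, which mypy otherwise rejects with "Need type annotation". A minimal sketch mirroring the pattern applied in `test_batch.py` and `test_client.py`:

    from typing import Any, Dict

    class _Entity(dict):
        key = None
        exclude_from_indexes = ()
        # A bare `_meanings = {}` is rejected by mypy ("Need type annotation");
        # the explicit annotation fixes the value type without changing behavior.
        _meanings: Dict[str, Any] = {}
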
--- .../google-cloud-datastore/google/__init__.py | 2 +- .../google/cloud/__init__.py | 2 +- .../services/datastore_admin/async_client.py | 8 +++---- .../services/datastore_admin/client.py | 8 +++---- .../types/datastore_admin.py | 6 +++--- packages/google-cloud-datastore/noxfile.py | 5 ++--- packages/google-cloud-datastore/owlbot.py | 21 +++++++++++++++---- .../tests/unit/test_batch.py | 5 ++++- .../tests/unit/test_client.py | 5 ++++- 9 files changed, 40 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-datastore/google/__init__.py b/packages/google-cloud-datastore/google/__init__.py index 0e1bc5131ba6..4755e2b063bc 100644 --- a/packages/google-cloud-datastore/google/__init__.py +++ b/packages/google-cloud-datastore/google/__init__.py @@ -19,4 +19,4 @@ except ImportError: import pkgutil - __path__ = pkgutil.extend_path(__path__, __name__) + __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/__init__.py b/packages/google-cloud-datastore/google/cloud/__init__.py index 0e1bc5131ba6..4755e2b063bc 100644 --- a/packages/google-cloud-datastore/google/cloud/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/__init__.py @@ -19,4 +19,4 @@ except ImportError: import pkgutil - __path__ = pkgutil.extend_path(__path__, __name__) + __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 1d9c8fc92e67..75d9ce2f31c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -231,7 +231,7 @@ async def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -260,7 +260,7 @@ async def export_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]`): + labels (:class:`Dict[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -368,7 +368,7 @@ async def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -394,7 +394,7 @@ async def import_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]`): + labels (:class:`Dict[str, str]`): Client-assigned labels. 
This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index cedc242816f3..18c8ed7bd83c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -406,7 +406,7 @@ def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -435,7 +435,7 @@ def export_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -542,7 +542,7 @@ def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -568,7 +568,7 @@ def import_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 0f4546fdb5b1..77af489199f4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -62,7 +62,7 @@ class CommonMetadata(proto.Message): operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. - labels (Sequence[google.cloud.datastore_admin_v1.types.CommonMetadata.LabelsEntry]): + labels (Dict[str, str]): The client-assigned labels which were provided when the operation was created. May also include additional labels. @@ -113,7 +113,7 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is @@ -157,7 +157,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. 
- labels (Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 2510a58b77cc..44bb4fc78b51 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -77,9 +77,8 @@ def blacken(session): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") - session.install("mypy") - # TODO: also verify types on tests, all of google package - session.run("mypy", "-p", "google.cloud.datastore", "--no-incremental") + session.install("mypy", "types-setuptools", "types-mock", "types-requests") + session.run("mypy", "google/", "tests/") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index e5b43cca8ffe..b3116698bc13 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -22,6 +22,7 @@ common = gcp.CommonTemplates() + # This is a customized version of the s.get_staging_dirs() function from synthtool to # cater for copying 2 different folders from googleapis-gen # which are datastore and datastore/admin @@ -60,6 +61,7 @@ def get_staging_dirs( else: return [] + # This library ships clients for two different APIs, # Datastore and Datastore Admin datastore_default_version = "v1" @@ -197,6 +199,17 @@ def docfx(session): """, ) +# Work around: https://github.com/googleapis/gapic-generator-python/issues/689 +s.replace( + [ + "google/**/datastore_admin/async_client.py", + "google/**/datastore_admin/client.py", + "google/**/types/datastore_admin.py", + ], + r"Sequence\[.*\.LabelsEntry\]", + r"Dict[str, str]", +) + # Add documentation about creating indexes and populating data for system # tests. assert 1 == s.replace( @@ -240,7 +253,8 @@ def docfx(session): ) # add type checker nox session -s.replace("noxfile.py", +s.replace( + "noxfile.py", """nox.options.sessions = \[ "unit", "system",""", @@ -262,9 +276,8 @@ def lint_setup_py\(session\): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") - session.install("mypy") - # TODO: also verify types on tests, all of google package - session.run("mypy", "-p", "google.cloud.datastore", "--no-incremental") + session.install("mypy", "types-setuptools", "types-mock", "types-requests") + session.run("mypy", "google/", "tests/") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index fffbefa2b2d9..0e45ed97906d 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Dict +from typing import Any + import mock import pytest @@ -498,7 +501,7 @@ def test__parse_commit_response(): class _Entity(dict): key = None exclude_from_indexes = () - _meanings = {} + _meanings: Dict[str, Any] = {} class _Key(object): diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 7f38a5ad61e7..da253deb6517 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Dict +from typing import Any + import mock import pytest @@ -1522,7 +1525,7 @@ def __exit__(self, *args): class _Entity(dict): key = None exclude_from_indexes = () - _meanings = {} + _meanings: Dict[str, Any] = {} class _Key(object): From 1e8b669056928635460613f67a51625468379ae5 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 15 Nov 2021 12:44:19 -0500 Subject: [PATCH 385/611] chore: strip unnecessary 'type: ignore' comments (#255) --- .../google/cloud/datastore/_http.py | 2 +- .../google/cloud/datastore/client.py | 6 +++--- .../services/datastore_admin/async_client.py | 6 +++--- .../datastore_admin_v1/services/datastore_admin/client.py | 6 +++--- .../services/datastore_admin/transports/base.py | 2 +- .../services/datastore_admin/transports/grpc.py | 4 ++-- .../services/datastore_admin/transports/grpc_asyncio.py | 6 +++--- .../cloud/datastore_admin_v1/types/datastore_admin.py | 4 ++-- .../google/cloud/datastore_admin_v1/types/index.py | 2 +- .../datastore_v1/services/datastore/transports/grpc.py | 2 +- .../services/datastore/transports/grpc_asyncio.py | 4 ++-- .../google/cloud/datastore_v1/types/datastore.py | 2 +- .../google/cloud/datastore_v1/types/entity.py | 8 ++++---- .../google/cloud/datastore_v1/types/query.py | 4 ++-- packages/google-cloud-datastore/mypy.ini | 4 ++-- packages/google-cloud-datastore/noxfile.py | 4 +++- packages/google-cloud-datastore/owlbot.py | 4 +++- .../unit/gapic/datastore_admin_v1/test_datastore_admin.py | 2 +- .../tests/unit/gapic/datastore_v1/test_datastore.py | 8 ++++---- 19 files changed, 42 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 9ea5aac88294..f92c76f05ef4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -17,7 +17,7 @@ from google.rpc import status_pb2 # type: ignore from google.cloud import _http as connection_module -from google.cloud import exceptions # type: ignore +from google.cloud import exceptions from google.cloud.datastore_v1.types import datastore as _datastore_pb2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 207759cc2bf6..03829ce0550a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -18,9 +18,9 @@ import google.api_core.client_options from google.auth.credentials import AnonymousCredentials # type: ignore -from google.cloud._helpers import _LocalStack # type: ignore -from google.cloud._helpers import _determine_default_project as _base_default_project # type: ignore -from google.cloud.client import 
ClientWithProject # type: ignore +from google.cloud._helpers import _LocalStack +from google.cloud._helpers import _determine_default_project as _base_default_project +from google.cloud.client import ClientWithProject from google.cloud.datastore.version import __version__ from google.cloud.datastore import helpers from google.cloud.datastore._http import HTTPDatastoreAPI diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 75d9ce2f31c9..c64a328cdf29 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -31,12 +31,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore +from google.api_core import operation +from google.api_core import operation_async from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import empty_pb2 from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport from .client import DatastoreAdminClient diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 18c8ed7bd83c..695446c58105 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -34,12 +34,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore +from google.api_core import operation +from google.api_core import operation_async from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import empty_pb2 from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreAdminGrpcTransport from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 1b3c3d325c0c..58358b0abf23 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -28,7 +28,7 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from 
google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 3d08e9db0fcc..d7018fcbfd3d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -23,11 +23,11 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +import grpc from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 3f9e53305144..84b5299b372e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -22,12 +22,12 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore -from grpc.experimental import aio # type: ignore +import grpc +from grpc.experimental import aio from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreAdminGrpcTransport diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 77af489199f4..0d42d80234d6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import proto # type: ignore +import proto from google.cloud.datastore_admin_v1.types import index as gda_index -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import timestamp_pb2 __protobuf__ = proto.module( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index b372cccf6c73..7a57c945757f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +import proto __protobuf__ = proto.module(package="google.datastore.admin.v1", manifest={"Index",},) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 55c93eb89b96..79071c807a34 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -22,7 +22,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +import grpc from google.cloud.datastore_v1.types import datastore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 2e71d2977c0b..7543acee84c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -21,8 +21,8 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore -from grpc.experimental import aio # type: ignore +import grpc +from grpc.experimental import aio from google.cloud.datastore_v1.types import datastore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index f3f87f6ca2a5..efd696aae27f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import proto # type: ignore +import proto from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 828d43e52c9c..8bf66889482b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +import proto -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.type import latlng_pb2 __protobuf__ = proto.module( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index b42e7191ad48..6ae312575d23 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +import proto from google.cloud.datastore_v1.types import entity as gd_entity -from google.protobuf import wrappers_pb2 # type: ignore +from google.protobuf import wrappers_pb2 __protobuf__ = proto.module( diff --git a/packages/google-cloud-datastore/mypy.ini b/packages/google-cloud-datastore/mypy.ini index 5663b40df934..17f8a8f6afbb 100644 --- a/packages/google-cloud-datastore/mypy.ini +++ b/packages/google-cloud-datastore/mypy.ini @@ -3,5 +3,5 @@ python_version = 3.6 namespace_packages = True ignore_missing_imports = True -[mypy-google.protobuf] -ignore_missing_imports = True +[mypy-google.cloud.datastore._app_engine_key_pb2] +ignore_errors = True diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 44bb4fc78b51..c8cc807033a1 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -77,7 +77,9 @@ def blacken(session): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") - session.install("mypy", "types-setuptools", "types-mock", "types-requests") + session.install( + "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + ) session.run("mypy", "google/", "tests/") diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index b3116698bc13..fbf8c1312364 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -276,7 +276,9 @@ def lint_setup_py\(session\): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") - session.install("mypy", "types-setuptools", "types-mock", "types-requests") + session.install( + "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + ) session.run("mypy", "google/", "tests/") diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py 
b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 69c55257e046..293b111142ce 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -29,7 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation_async # type: ignore +from google.api_core import operation_async from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index b58b5ad5afe9..acfffd844c02 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -38,10 +38,10 @@ from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query from google.oauth2 import service_account -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.protobuf import wrappers_pb2 +from google.type import latlng_pb2 import google.auth From 696adaded725bde0bb0fe62e8c659ac8f8ecd6db Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Thu, 18 Nov 2021 13:06:50 -0500 Subject: [PATCH 386/611] chore: update doc links from googleapis.dev to cloud.google.com (#256) --- packages/google-cloud-datastore/.repo-metadata.json | 2 +- packages/google-cloud-datastore/README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index bfeff1a10f29..bff823e5fd3b 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "datastore", "name_pretty": "Google Cloud Datastore", "product_documentation": "https://cloud.google.com/datastore", - "client_documentation": "https://googleapis.dev/python/datastore/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/datastore/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", "release_level": "ga", "language": "python", diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index b142bc22ffa8..2315c84a893f 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -20,7 +20,7 @@ all other queries. :target: https://pypi.org/project/google-cloud-datastore/ .. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs .. _Product Documentation: https://cloud.google.com/datastore/docs -.. _Client Library Documentation: https://googleapis.dev/python/datastore/latest +.. 
_Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest Quick Start ----------- From ff199e5af6d13cf4238058452043bacd71f145f3 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Dec 2021 13:20:28 -0500 Subject: [PATCH 387/611] chore: update .repo-metadata.json (#259) --- packages/google-cloud-datastore/.repo-metadata.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index bff823e5fd3b..a5bf20b2c694 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -4,12 +4,13 @@ "product_documentation": "https://cloud.google.com/datastore", "client_documentation": "https://cloud.google.com/python/docs/reference/datastore/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", "repo": "googleapis/python-datastore", "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com", "default_version": "v1", - "codeowner_team": "@googleapis/firestore-dpe" + "codeowner_team": "@googleapis/firestore-dpe", + "api_shortname": "datastore" } From 4d6caf995713848fb556d87c170042b186c55f07 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 16:00:30 +0000 Subject: [PATCH 388/611] chore: use python-samples-reviewers (#262) --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.github/CODEOWNERS | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 7519fa3a2289..f33299ddbbab 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS index c5d76c65c532..6116b8379e24 100644 --- a/packages/google-cloud-datastore/.github/CODEOWNERS +++ b/packages/google-cloud-datastore/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python @googleapis/firestore-dpe are the default owners for changes in this repo * @googleapis/yoshi-python @googleapis/firestore-dpe -# @googleapis/python-samples-owners @googleapis/firestore-dpe are the default owners for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/firestore-dpe +# @googleapis/python-samples-reviewers @googleapis/firestore-dpe are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/firestore-dpe From 8063c335da84913b16363aaf1efefc159e70df21 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 15:46:20 +0000 Subject: [PATCH 389/611] build: switch to release-please for tagging (#263) --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.github/release-please.yml | 1 + 
packages/google-cloud-datastore/.github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-datastore/.github/release-trigger.yml diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index f33299ddbbab..ff5126c188d0 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/packages/google-cloud-datastore/.github/release-please.yml b/packages/google-cloud-datastore/.github/release-please.yml index 4507ad0598a5..466597e5b196 100644 --- a/packages/google-cloud-datastore/.github/release-please.yml +++ b/packages/google-cloud-datastore/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/packages/google-cloud-datastore/.github/release-trigger.yml b/packages/google-cloud-datastore/.github/release-trigger.yml new file mode 100644 index 000000000000..d4ca94189e16 --- /dev/null +++ b/packages/google-cloud-datastore/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 276601eee92228de8ddad040b8c11b37d7d892e3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 12:04:39 -0500 Subject: [PATCH 390/611] chore(python): update release.sh to use keystore (#264) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot --- .../google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.kokoro/release.sh | 2 +- .../.kokoro/release/common.cfg | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index ff5126c188d0..eecb84c21b27 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index c593dd9dabfd..fc6f080213d6 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-datastore python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg index 8571f251b63b..6f858df92560 100644 --- a/packages/google-cloud-datastore/.kokoro/release/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-datastore/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 5e2c52a0b68f61821fff76e8fa0527a12d38a497 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jan 2022 05:38:19 -0500 Subject: [PATCH 391/611] ci(python): run lint / unit tests / docs / mypy as GH actions (#265) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * add commit to trigger gh actions * add gh action checks for mypy and doctests * set python version for mypy and doctests * remove doctests GH action as credentials are needed Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 16 +++++- .../.github/workflows/docs.yml | 38 +++++++++++++ .../.github/workflows/lint.yml | 25 ++++++++ .../.github/workflows/mypy.yml | 22 +++++++ .../.github/workflows/unittest.yml | 57 +++++++++++++++++++ 5 files changed, 157 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-datastore/.github/workflows/docs.yml create mode 100644 packages/google-cloud-datastore/.github/workflows/lint.yml create mode 100644 packages/google-cloud-datastore/.github/workflows/mypy.yml create mode 100644 packages/google-cloud-datastore/.github/workflows/unittest.yml diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index eecb84c21b27..b668c04d5d65 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml new file mode 100644 index 000000000000..f7b8344c4500 --- /dev/null +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml new file mode 100644 index 000000000000..1e8b05c3d7ff --- /dev/null +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml new file mode 100644 index 000000000000..5a0f0e090d69 --- /dev/null +++ b/packages/google-cloud-datastore/.github/workflows/mypy.yml @@ -0,0 +1,22 @@ +on: + pull_request: + branches: + - main +name: mypy +jobs: + mypy: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.8" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run mypy + run: | + nox -s mypy diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml new file mode 100644 index 000000000000..074ee2504ca5 --- /dev/null +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: 
.coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 From c08e191afb2fdf7ede0e40fbb951a1738ef54389 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 15 Feb 2022 09:04:48 -0700 Subject: [PATCH 392/611] fix(deps): move libcst to extras (#271) * fix(deps): move libcst to extras * Update setup.py --- packages/google-cloud-datastore/UPGRADING.md | 4 ++-- packages/google-cloud-datastore/setup.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/UPGRADING.md b/packages/google-cloud-datastore/UPGRADING.md index 517c39adb8f9..8d8ec3bc52fd 100644 --- a/packages/google-cloud-datastore/UPGRADING.md +++ b/packages/google-cloud-datastore/UPGRADING.md @@ -20,10 +20,10 @@ If you previously were using modules or functions under the namespace To assist with this, we have included some helpful scripts to make some of the code modifications required to use 2.0.0. -* Install the library +* Install the library with `libcst`. ```py -python3 -m pip install google-cloud-datastore +python3 -m pip install google-cloud-datastore[libcst] ``` * The scripts `fixup_datastore_v1_keywords.py` and `fixup_datastore_admin_v1_keywords.py` diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 286653d5d3fa..6d24d2ee9a7c 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -38,9 +38,8 @@ # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.4.0", - "libcst >= 0.2.5", ] -extras = {} +extras = {"libcst": "libcst >= 0.2.5"} # Setup boilerplate below this line. From 3a95e974c69696aeac3e36c9f6a144ba3f41aa5b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Feb 2022 19:02:37 +0000 Subject: [PATCH 393/611] feat: define Datastore -> Firestore in Datastore mode migration long running operation metadata (#270) - [ ] Regenerate this pull request now. 
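The setup.py hunk above is the heart of the libcst fix: the dependency moves out of install_requires and into a named extra, so a plain install no longer pulls it in and the fixup scripts opt in via ``pip install google-cloud-datastore[libcst]``. A minimal sketch of that setuptools pattern, using a hypothetical package name as a stand-in:

.. code-block:: python

    import setuptools

    setuptools.setup(
        name="example-package",  # hypothetical; stands in for google-cloud-datastore
        install_requires=["proto-plus >= 1.4.0"],  # always installed
        # Optional dependency group; users opt in with:
        #   pip install example-package[libcst]
        extras_require={"libcst": ["libcst >= 0.2.5"]},
    )
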
PiperOrigin-RevId: 431037888 Source-Link: https://github.com/googleapis/googleapis/commit/b3397f5febbf21dfc69b875ddabaf76bee765058 Source-Link: https://github.com/googleapis/googleapis-gen/commit/510b54e1cdefd53173984df16645081308fe897e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTEwYjU0ZTFjZGVmZDUzMTczOTg0ZGYxNjY0NTA4MTMwOGZlODk3ZSJ9 chore: use gapic-generator-python 0.63.4 chore: fix snippet region tag format chore: fix docstring code block formatting chore: update copyright year to 2022 feat: add api key support fix: resolve DuplicateCredentialArgs error when using credentials_file PiperOrigin-RevId: 430730865 Source-Link: https://github.com/googleapis/googleapis/commit/ea5800229f73f94fd7204915a86ed09dcddf429a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca893ff8af25fc7fe001de1405a517d80446ecca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2E4OTNmZjhhZjI1ZmM3ZmUwMDFkZTE0MDVhNTE3ZDgwNDQ2ZWNjYSJ9 chore: use gapic-generator-python 0.63.2 docs: add generated snippets PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 --- .../cloud/datastore_admin_v1/__init__.py | 12 +- .../datastore_admin_v1/services/__init__.py | 2 +- .../services/datastore_admin/__init__.py | 2 +- .../services/datastore_admin/async_client.py | 184 ++++++++++- .../services/datastore_admin/client.py | 273 +++++++++++++--- .../services/datastore_admin/pagers.py | 2 +- .../datastore_admin/transports/__init__.py | 2 +- .../datastore_admin/transports/base.py | 5 +- .../datastore_admin/transports/grpc.py | 13 +- .../transports/grpc_asyncio.py | 15 +- .../datastore_admin_v1/types/__init__.py | 14 +- .../types/datastore_admin.py | 31 +- .../cloud/datastore_admin_v1/types/index.py | 4 +- .../datastore_admin_v1/types/migration.py | 135 ++++++++ .../google/cloud/datastore_v1/__init__.py | 2 +- .../cloud/datastore_v1/services/__init__.py | 2 +- .../services/datastore/__init__.py | 2 +- .../services/datastore/async_client.py | 194 +++++++++++- .../datastore_v1/services/datastore/client.py | 283 +++++++++++++---- .../services/datastore/transports/__init__.py | 2 +- .../services/datastore/transports/base.py | 3 +- .../services/datastore/transports/grpc.py | 13 +- .../datastore/transports/grpc_asyncio.py | 15 +- .../cloud/datastore_v1/types/__init__.py | 2 +- .../cloud/datastore_v1/types/datastore.py | 12 +- .../google/cloud/datastore_v1/types/entity.py | 10 +- .../google/cloud/datastore_v1/types/query.py | 6 +- .../fixup_datastore_admin_v1_keywords.py | 2 +- .../scripts/fixup_datastore_v1_keywords.py | 2 +- .../google-cloud-datastore/tests/__init__.py | 2 +- .../tests/unit/__init__.py | 2 +- .../tests/unit/gapic/__init__.py | 2 +- .../unit/gapic/datastore_admin_v1/__init__.py | 2 +- .../test_datastore_admin.py | 293 ++++++++++++++---- .../tests/unit/gapic/datastore_v1/__init__.py | 2 +- .../unit/gapic/datastore_v1/test_datastore.py | 273 ++++++++++++---- 36 files changed, 1527 insertions(+), 293 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index 70a79c07616e..4d0164cf72bd 
100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ from .types.datastore_admin import CommonMetadata from .types.datastore_admin import CreateIndexRequest +from .types.datastore_admin import DatastoreFirestoreMigrationMetadata from .types.datastore_admin import DeleteIndexRequest from .types.datastore_admin import EntityFilter from .types.datastore_admin import ExportEntitiesMetadata @@ -33,12 +34,17 @@ from .types.datastore_admin import Progress from .types.datastore_admin import OperationType from .types.index import Index +from .types.migration import MigrationProgressEvent +from .types.migration import MigrationStateEvent +from .types.migration import MigrationState +from .types.migration import MigrationStep __all__ = ( "DatastoreAdminAsyncClient", "CommonMetadata", "CreateIndexRequest", "DatastoreAdminClient", + "DatastoreFirestoreMigrationMetadata", "DeleteIndexRequest", "EntityFilter", "ExportEntitiesMetadata", @@ -51,6 +57,10 @@ "IndexOperationMetadata", "ListIndexesRequest", "ListIndexesResponse", + "MigrationProgressEvent", + "MigrationState", + "MigrationStateEvent", + "MigrationStep", "OperationType", "Progress", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index 951a69a98da9..6e5bb3d16f6b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
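With the re-exports above in place, the new migration surface is importable straight from the versioned package. A quick smoke test of the names (pure local enum access, no API call, assuming the updated package is installed):

.. code-block:: python

    from google.cloud import datastore_admin_v1

    # Enums re-exported at the package root by the __init__.py hunk above.
    state = datastore_admin_v1.MigrationState.RUNNING
    step = datastore_admin_v1.MigrationStep.START
    print(state.name, step.name)  # RUNNING START
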
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index c64a328cdf29..ebac62bd8569 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -31,12 +31,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.protobuf import empty_pb2 +from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport from .client import DatastoreAdminClient @@ -166,6 +166,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return DatastoreAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> DatastoreAdminTransport: """Returns the transport used by the client instance. @@ -249,6 +285,31 @@ async def export_entities( before completion it may leave partial data behind in Google Cloud Storage. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_export_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + # Make the request + operation = client.export_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): The request object. The request for @@ -316,7 +377,7 @@ async def export_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] @@ -383,6 +444,31 @@ async def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_import_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + # Make the request + operation = client.import_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): The request object. The request for @@ -456,7 +542,7 @@ async def import_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: @@ -525,6 +611,29 @@ async def create_index( Indexes with a single property cannot be created. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_create_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.CreateIndexRequest( + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): The request object. The request for @@ -592,6 +701,29 @@ async def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_delete_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.DeleteIndexRequest( + ) + + # Make the request + operation = client.delete_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): The request object. The request for @@ -646,6 +778,24 @@ async def get_index( ) -> index.Index: r"""Gets an index. + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_get_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.GetIndexRequest( + ) + + # Make the request + response = client.get_index(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): The request object. The request for @@ -700,6 +850,26 @@ async def list_indexes( the list of indexes and may occasionally return stale results. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_list_indexes(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ListIndexesRequest( + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): The request object. The request for diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 695446c58105..4f4f9211724f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
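The ``get_mtls_endpoint_and_cert_source`` classmethod added above makes the endpoint/certificate decision inspectable before any client is constructed. A hedged usage sketch, assuming a default environment in which ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` is unset (treated as "false"):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient

    endpoint, cert_source = DatastoreAdminClient.get_mtls_endpoint_and_cert_source(
        ClientOptions()
    )
    # With no client certificate configured, cert_source is None and the
    # default (non-mTLS) API endpoint is returned.
    print(endpoint, cert_source)
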
@@ -34,12 +34,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.protobuf import empty_pb2 +from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreAdminGrpcTransport from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport @@ -277,6 +277,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -327,57 +394,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, DatastoreAdminTransport): # transport is a DatastoreAdminTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -389,6 +421,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -424,6 +465,31 @@ def export_entities( before completion it may leave partial data behind in Google Cloud Storage. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_export_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + # Make the request + operation = client.export_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): The request object. The request for @@ -491,7 +557,7 @@ def export_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] @@ -557,6 +623,31 @@ def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_import_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + # Make the request + operation = client.import_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): The request object. The request for @@ -630,7 +721,7 @@ def import_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: @@ -698,6 +789,29 @@ def create_index( Indexes with a single property cannot be created. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_create_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.CreateIndexRequest( + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): The request object. The request for @@ -766,6 +880,29 @@ def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_delete_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.DeleteIndexRequest( + ) + + # Make the request + operation = client.delete_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): The request object. The request for @@ -821,6 +958,24 @@ def get_index( ) -> index.Index: r"""Gets an index. + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_get_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.GetIndexRequest( + ) + + # Make the request + response = client.get_index(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): The request object. The request for @@ -866,6 +1021,26 @@ def list_indexes( the list of indexes and may occasionally return stale results. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_list_indexes(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ListIndexesRequest( + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): The request object. The request for diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index a2f148588ade..9a2d05ad7796 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index 376bbfa1ecf5..7d7ea9d4ddfd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
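The constructor rework above is also where the new API key support lands: the key travels in ``client_options``, is mutually exclusive with explicit credentials, and only takes effect when the installed google-auth exposes ``get_api_key_credentials``. A minimal sketch with a placeholder key (also assumes a google-api-core recent enough for ``ClientOptions.api_key``):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient

    # "YOUR_API_KEY" is a placeholder; passing `credentials` alongside an
    # api_key raises ValueError, per the constructor logic above.
    client = DatastoreAdminClient(
        client_options=ClientOptions(api_key="YOUR_API_KEY")
    )
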
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 58358b0abf23..1b47ae2b17c5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,7 +28,7 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -107,7 +107,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index d7018fcbfd3d..e27734f805b9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,11 +23,11 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc +import grpc # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO @@ -219,8 +219,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -293,7 +296,7 @@ def operations_client(self) -> operations_v1.OperationsClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsClient(self.grpc_channel) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 84b5299b372e..46f848877361 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,12 +22,12 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc -from grpc.experimental import aio +import grpc # type: ignore +from grpc.experimental import aio # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreAdminGrpcTransport @@ -264,8 +264,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -295,7 +298,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py index ac4ff9056251..fbc4f65f995b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
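The ``create_channel`` changes in the transports above implement the ``DuplicateCredentialArgs`` fix named in the commit message: credentials are resolved once in the base transport, and the channel factory then receives those saved credentials together with an explicit ``credentials_file=None``, so the two arguments can never collide. A sketch of the resulting call pattern (needs application default credentials to actually run):

.. code-block:: python

    import google.auth

    from google.cloud.datastore_admin_v1.services.datastore_admin.transports.grpc import (
        DatastoreAdminGrpcTransport,
    )

    # Resolve credentials once, mirroring what the base transport does...
    credentials, _ = google.auth.default()
    # ...then never forward credentials_file alongside them.
    channel = DatastoreAdminGrpcTransport.create_channel(
        "datastore.googleapis.com",
        credentials=credentials,
        credentials_file=None,
    )
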
@@ -16,6 +16,7 @@ from .datastore_admin import ( CommonMetadata, CreateIndexRequest, + DatastoreFirestoreMigrationMetadata, DeleteIndexRequest, EntityFilter, ExportEntitiesMetadata, @@ -31,10 +32,17 @@ OperationType, ) from .index import Index +from .migration import ( + MigrationProgressEvent, + MigrationStateEvent, + MigrationState, + MigrationStep, +) __all__ = ( "CommonMetadata", "CreateIndexRequest", + "DatastoreFirestoreMigrationMetadata", "DeleteIndexRequest", "EntityFilter", "ExportEntitiesMetadata", @@ -49,4 +57,8 @@ "Progress", "OperationType", "Index", + "MigrationProgressEvent", + "MigrationStateEvent", + "MigrationState", + "MigrationStep", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 0d42d80234d6..4e5ad0dafa35 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto +import proto # type: ignore from google.cloud.datastore_admin_v1.types import index as gda_index -from google.protobuf import timestamp_pb2 +from google.cloud.datastore_admin_v1.types import migration +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -37,6 +38,7 @@ "ListIndexesRequest", "ListIndexesResponse", "IndexOperationMetadata", + "DatastoreFirestoreMigrationMetadata", }, ) @@ -410,4 +412,27 @@ class IndexOperationMetadata(proto.Message): index_id = proto.Field(proto.STRING, number=3,) +class DatastoreFirestoreMigrationMetadata(proto.Message): + r"""Metadata for Datastore to Firestore migration operations. + + The DatastoreFirestoreMigration operation is not started by the + end-user via an explicit "creation" method. This is an intentional + deviation from the LRO design pattern. + + This singleton resource can be accessed at: + ``projects/{project_id}/datastore-firestore-migration`` + + Attributes: + migration_state (google.cloud.datastore_admin_v1.types.MigrationState): + The current state of migration from Cloud + Datastore to Cloud Firestore in Datastore mode. + migration_step (google.cloud.datastore_admin_v1.types.MigrationStep): + The current step of migration from Cloud + Datastore to Cloud Firestore in Datastore mode. 
+    """
+
+    migration_state = proto.Field(proto.ENUM, number=1, enum=migration.MigrationState,)
+    migration_step = proto.Field(proto.ENUM, number=2, enum=migration.MigrationStep,)
+
+
 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py
index 7a57c945757f..8d50f03a5a3f 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import proto
+import proto  # type: ignore
 
 
 __protobuf__ = proto.module(package="google.datastore.admin.v1", manifest={"Index",},)
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py
new file mode 100644
index 000000000000..18cdd8d61051
--- /dev/null
+++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.datastore.admin.v1",
+    manifest={
+        "MigrationState",
+        "MigrationStep",
+        "MigrationStateEvent",
+        "MigrationProgressEvent",
+    },
+)
+
+
+class MigrationState(proto.Enum):
+    r"""States for a migration."""
+    MIGRATION_STATE_UNSPECIFIED = 0
+    RUNNING = 1
+    PAUSED = 2
+    COMPLETE = 3
+
+
+class MigrationStep(proto.Enum):
+    r"""Steps in a migration."""
+    MIGRATION_STEP_UNSPECIFIED = 0
+    PREPARE = 6
+    START = 1
+    APPLY_WRITES_SYNCHRONOUSLY = 7
+    COPY_AND_VERIFY = 2
+    REDIRECT_EVENTUALLY_CONSISTENT_READS = 3
+    REDIRECT_STRONGLY_CONSISTENT_READS = 4
+    REDIRECT_WRITES = 5
+
+
+class MigrationStateEvent(proto.Message):
+    r"""An event signifying a change in state of a `migration from Cloud
+    Datastore to Cloud Firestore in Datastore
+    mode <https://cloud.google.com/datastore/docs/upgrade-to-firestore>`__.
+
+    Attributes:
+        state (google.cloud.datastore_admin_v1.types.MigrationState):
+            The new state of the migration.
+    """
+
+    state = proto.Field(proto.ENUM, number=1, enum="MigrationState",)
+
+
+class MigrationProgressEvent(proto.Message):
+    r"""An event signifying the start of a new step in a `migration from
+    Cloud Datastore to Cloud Firestore in Datastore
+    mode <https://cloud.google.com/datastore/docs/upgrade-to-firestore>`__.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        step (google.cloud.datastore_admin_v1.types.MigrationStep):
+            The step that is starting.
+
+            An event with step set to ``START`` indicates that the
+            migration has been reverted back to the initial
+            pre-migration state.
+        prepare_step_details (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.PrepareStepDetails):
+            Details for the ``PREPARE`` step.
+
+            This field is a member of `oneof`_ ``step_details``.
+        redirect_writes_step_details (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.RedirectWritesStepDetails):
+            Details for the ``REDIRECT_WRITES`` step.
+
+            This field is a member of `oneof`_ ``step_details``.
+    """
+
+    class ConcurrencyMode(proto.Enum):
+        r"""Concurrency modes for transactions in Cloud Firestore."""
+        CONCURRENCY_MODE_UNSPECIFIED = 0
+        PESSIMISTIC = 1
+        OPTIMISTIC = 2
+
+    class PrepareStepDetails(proto.Message):
+        r"""Details for the ``PREPARE`` step.
+
+        Attributes:
+            concurrency_mode (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.ConcurrencyMode):
+                The concurrency mode this database will use when it reaches
+                the ``REDIRECT_WRITES`` step.
+        """
+
+        concurrency_mode = proto.Field(
+            proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode",
+        )
+
+    class RedirectWritesStepDetails(proto.Message):
+        r"""Details for the ``REDIRECT_WRITES`` step.
+
+        Attributes:
+            concurrency_mode (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.ConcurrencyMode):
+                The concurrency mode for this database.
+        """
+
+        concurrency_mode = proto.Field(
+            proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode",
+        )
+
+    step = proto.Field(proto.ENUM, number=1, enum="MigrationStep",)
+    prepare_step_details = proto.Field(
+        proto.MESSAGE, number=2, oneof="step_details", message=PrepareStepDetails,
+    )
+    redirect_writes_step_details = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof="step_details",
+        message=RedirectWritesStepDetails,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py
index 247eec151abc..881df4caf73e 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py
index 4de65971c238..e8e1c3845db5 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
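The new ``migration.py`` module above is a small proto-plus surface: two enums plus two event messages, with ``MigrationProgressEvent`` carrying a ``step_details`` oneof. As a minimal sketch of how these types behave (assuming only what the module above defines, with illustrative values):

    from google.cloud.datastore_admin_v1.types import migration

    # Build a progress event the way the admin API might report one
    # (the values here are illustrative, not from a live migration).
    event = migration.MigrationProgressEvent(
        step=migration.MigrationStep.REDIRECT_WRITES,
        redirect_writes_step_details=migration.MigrationProgressEvent.RedirectWritesStepDetails(
            concurrency_mode=migration.MigrationProgressEvent.ConcurrencyMode.PESSIMISTIC,
        ),
    )

    # proto-plus wraps a protobuf message; WhichOneof on the underlying
    # message reports which ``step_details`` member is populated.
    pb = migration.MigrationProgressEvent.pb(event)
    print(event.step.name)                # REDIRECT_WRITES
    print(pb.WhichOneof("step_details"))  # redirect_writes_step_details

Setting ``prepare_step_details`` on the same instance would clear ``redirect_writes_step_details``, per the oneof semantics noted in the class docstring.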
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py
index 611f280bd6a2..66d6560d859a 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py
index 2246300ec0fd..c6f8431bd383 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import functools
 import re
-from typing import Dict, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
 from google.api_core.client_options import ClientOptions
@@ -106,6 +106,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     from_service_account_json = from_service_account_file
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DatastoreClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> DatastoreTransport:
         """Returns the transport used by the client instance.
@@ -179,6 +215,25 @@ async def lookup(
     ) -> datastore.LookupResponse:
         r"""Looks up entities by key.
 
+        .. 
code-block:: python + + from google.cloud import datastore_v1 + + def sample_lookup(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.LookupRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.lookup(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): The request object. The request for @@ -215,7 +270,7 @@ async def lookup( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: @@ -269,6 +324,25 @@ async def run_query( ) -> datastore.RunQueryResponse: r"""Queries for entities. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_run_query(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RunQueryRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.run_query(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): The request object. The request for @@ -323,6 +397,25 @@ async def begin_transaction( ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_begin_transaction(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.BeginTransactionRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): The request object. The request for @@ -347,7 +440,7 @@ async def begin_transaction( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: @@ -392,6 +485,27 @@ async def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_commit(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.CommitRequest( + transaction=b'transaction_blob', + project_id="project_id_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): The request object. The request for @@ -451,7 +565,7 @@ async def commit( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: @@ -499,6 +613,26 @@ async def rollback( ) -> datastore.RollbackResponse: r"""Rolls back a transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_rollback(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RollbackRequest( + project_id="project_id_value", + transaction=b'transaction_blob', + ) + + # Make the request + response = client.rollback(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): The request object. The request for @@ -531,7 +665,7 @@ async def rollback( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: @@ -576,6 +710,26 @@ async def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_allocate_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.AllocateIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.allocate_ids(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): The request object. The request for @@ -609,7 +763,7 @@ async def allocate_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: @@ -651,8 +805,28 @@ async def reserve_ids( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: - r"""Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + r"""Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. + + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_reserve_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.ReserveIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.reserve_ids(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): @@ -686,7 +860,7 @@ async def reserve_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
        has_flattened_params = any([project_id, keys])
        if request is not None and has_flattened_params:
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py
index 5205b8935562..49c741dee835 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -224,6 +224,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -274,57 +341,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, DatastoreTransport): # transport is a DatastoreTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -336,6 +368,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -361,6 +402,25 @@ def lookup( ) -> datastore.LookupResponse: r"""Looks up entities by key. + .. 
code-block:: python + + from google.cloud import datastore_v1 + + def sample_lookup(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.LookupRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.lookup(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): The request object. The request for @@ -397,7 +457,7 @@ def lookup( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: @@ -441,6 +501,25 @@ def run_query( ) -> datastore.RunQueryResponse: r"""Queries for entities. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_run_query(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RunQueryRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.run_query(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): The request object. The request for @@ -486,6 +565,25 @@ def begin_transaction( ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_begin_transaction(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.BeginTransactionRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): The request object. The request for @@ -510,7 +608,7 @@ def begin_transaction( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: @@ -555,6 +653,27 @@ def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_commit(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.CommitRequest( + transaction=b'transaction_blob', + project_id="project_id_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): The request object. The request for @@ -614,7 +733,7 @@ def commit( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: @@ -662,6 +781,26 @@ def rollback( ) -> datastore.RollbackResponse: r"""Rolls back a transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_rollback(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RollbackRequest( + project_id="project_id_value", + transaction=b'transaction_blob', + ) + + # Make the request + response = client.rollback(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): The request object. The request for @@ -694,7 +833,7 @@ def rollback( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: @@ -739,6 +878,26 @@ def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_allocate_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.AllocateIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.allocate_ids(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): The request object. The request for @@ -772,7 +931,7 @@ def allocate_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: @@ -814,8 +973,28 @@ def reserve_ids( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: - r"""Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + r"""Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. + + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_reserve_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.ReserveIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.reserve_ids(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): @@ -849,7 +1028,7 @@ def reserve_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 41074a07ccc3..b7d617f6aa87 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index e23e42a2a59e..487a1a456813 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -102,7 +102,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 79071c807a34..410aa89d4a0f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc +import grpc # type: ignore from google.cloud.datastore_v1.types import datastore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -164,8 +164,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -393,8 +396,8 @@ def reserve_ids( ) -> Callable[[datastore.ReserveIdsRequest], datastore.ReserveIdsResponse]: r"""Return a callable for the reserve ids method over gRPC. - Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. 
Returns: Callable[[~.ReserveIdsRequest], diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 7543acee84c9..f539e84f33c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,8 +21,8 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc -from grpc.experimental import aio +import grpc # type: ignore +from grpc.experimental import aio # type: ignore from google.cloud.datastore_v1.types import datastore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -209,8 +209,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -404,8 +407,8 @@ def reserve_ids( ]: r"""Return a callable for the reserve ids method over gRPC. - Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. Returns: Callable[[~.ReserveIdsRequest], diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index 7553ac77cc68..eb4fc8c2a755 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index efd696aae27f..e77ad1e9398b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto +import proto # type: ignore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query @@ -319,8 +319,8 @@ class ReserveIdsRequest(proto.Message): which to make the request. 
keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key - paths whose numeric IDs should not be auto- - allocated. + paths whose numeric IDs should not be + auto-allocated. """ project_id = proto.Field(proto.STRING, number=8,) @@ -366,8 +366,8 @@ class Mutation(proto.Message): delete (google.cloud.datastore_v1.types.Key): The key of the entity to delete. The entity may or may not already exist. Must have a - complete key path and must not be reserved/read- - only. + complete key path and must not be + reserved/read-only. This field is a member of `oneof`_ ``operation``. base_version (int): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 8bf66889482b..1c432ee61218 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto +import proto # type: ignore -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.type import latlng_pb2 +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore __protobuf__ = proto.module( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 6ae312575d23..46147f057071 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto +import proto # type: ignore from google.cloud.datastore_v1.types import entity as gd_entity -from google.protobuf import wrappers_pb2 +from google.protobuf import wrappers_pb2 # type: ignore __protobuf__ = proto.module( diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py index 12e217dea24c..49b96026dac4 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
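The reflowed ``ReserveIdsRequest`` docstring describes keys with complete paths whose numeric IDs must stay out of the allocator's pool. A short sketch of assembling such a request from the types this package exports (project and path values are illustrative):

    from google.cloud import datastore_v1

    # A complete key path: a kind plus a concrete numeric id.
    key = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task", id=5678)],
    )

    # Ask Datastore never to auto-allocate this id.
    request = datastore_v1.ReserveIdsRequest(project_id="my-project", keys=[key])

Passed to ``reserve_ids``, this keeps id ``5678`` for the ``Task`` kind out of automatic allocation.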
diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index e0358795f751..4f5265b60140 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/__init__.py b/packages/google-cloud-datastore/tests/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-datastore/tests/__init__.py +++ b/packages/google-cloud-datastore/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-datastore/tests/unit/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 293b111142ce..e6ed5508d9ff 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
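The test hunks that follow rework the generated admin tests around the new ``get_mtls_endpoint_and_cert_source`` classmethod. The behavior they pin down is a purely environment-driven resolution; a condensed sketch of the same check outside pytest, using only calls that appear in the tests below:

    import os
    from unittest import mock

    from google.cloud.datastore_admin_v1 import DatastoreAdminClient

    # Forcing the mTLS endpoint via the environment wins even when no
    # client certificate source is configured.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        endpoint, cert_source = DatastoreAdminClient.get_mtls_endpoint_and_cert_source()
        assert endpoint == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None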
@@ -29,7 +29,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials @@ -255,20 +256,20 @@ def test_datastore_admin_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -327,7 +328,7 @@ def test_datastore_admin_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -404,6 +405,87 @@ def test_datastore_admin_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient] +) +@mock.patch.object( + DatastoreAdminClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminAsyncClient), +) +def test_datastore_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -422,7 +504,7 @@ def test_datastore_admin_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -436,24 +518,31 @@ def test_datastore_admin_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + ( + DatastoreAdminClient, + transports.DatastoreAdminGrpcTransport, + "grpc", + grpc_helpers, + ), ( DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_datastore_admin_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -486,9 +575,77 @@ def test_datastore_admin_client_client_options_from_dict(): ) -def test_export_entities( - transport: str = "grpc", request_type=datastore_admin.ExportEntitiesRequest +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DatastoreAdminClient, + transports.DatastoreAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_datastore_admin_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers ): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=None, + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [datastore_admin.ExportEntitiesRequest, dict,]) +def test_export_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -512,10 +669,6 @@ def test_export_entities( assert isinstance(response, future.Future) -def test_export_entities_from_dict(): - test_export_entities(request_type=dict) - - def test_export_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
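The parametrized form above is what retires each ``test_*_from_dict`` wrapper: the same test body now runs once with the proto request type and once with ``dict``. That works because proto-plus request constructors accept a mapping, so both spellings denote the same message; a quick sketch (field value is illustrative):

    from google.cloud.datastore_admin_v1.types import datastore_admin

    as_proto = datastore_admin.ExportEntitiesRequest(project_id="my-project")
    as_dict = datastore_admin.ExportEntitiesRequest({"project_id": "my-project"})

    # Both spellings build the same underlying protobuf message.
    assert as_proto == as_dict

The client methods perform the same coercion internally, which is why the parametrized tests can hand either form to the mocked stub.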
@@ -673,9 +826,8 @@ async def test_export_entities_flattened_error_async(): ) -def test_import_entities( - transport: str = "grpc", request_type=datastore_admin.ImportEntitiesRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.ImportEntitiesRequest, dict,]) +def test_import_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -699,10 +851,6 @@ def test_import_entities( assert isinstance(response, future.Future) -def test_import_entities_from_dict(): - test_import_entities(request_type=dict) - - def test_import_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -860,9 +1008,8 @@ async def test_import_entities_flattened_error_async(): ) -def test_create_index( - transport: str = "grpc", request_type=datastore_admin.CreateIndexRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.CreateIndexRequest, dict,]) +def test_create_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -886,10 +1033,6 @@ def test_create_index( assert isinstance(response, future.Future) -def test_create_index_from_dict(): - test_create_index(request_type=dict) - - def test_create_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -939,9 +1082,8 @@ async def test_create_index_async_from_dict(): await test_create_index_async(request_type=dict) -def test_delete_index( - transport: str = "grpc", request_type=datastore_admin.DeleteIndexRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.DeleteIndexRequest, dict,]) +def test_delete_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -965,10 +1107,6 @@ def test_delete_index( assert isinstance(response, future.Future) -def test_delete_index_from_dict(): - test_delete_index(request_type=dict) - - def test_delete_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1018,9 +1156,8 @@ async def test_delete_index_async_from_dict(): await test_delete_index_async(request_type=dict) -def test_get_index( - transport: str = "grpc", request_type=datastore_admin.GetIndexRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.GetIndexRequest, dict,]) +def test_get_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1055,10 +1192,6 @@ def test_get_index( assert response.state == index.Index.State.CREATING -def test_get_index_from_dict(): - test_get_index(request_type=dict) - - def test_get_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
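Each converted test keeps the established stub-mocking pattern: patch ``__call__`` on the transport's gRPC callable and hand back a canned proto. A condensed sketch of that pattern for ``get_index``, mirroring the assertions above (field values are illustrative):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient
    from google.cloud.datastore_admin_v1.types import index

    client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.get_index), "__call__") as call:
        # The mocked stub returns a canned Index instead of hitting the API.
        call.return_value = index.Index(
            index_id="index_id_value", state=index.Index.State.CREATING,
        )
        response = client.get_index(request={"project_id": "sample"})

    assert response.index_id == "index_id_value"
    assert response.state == index.Index.State.CREATING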
@@ -1119,9 +1252,8 @@ async def test_get_index_async_from_dict(): await test_get_index_async(request_type=dict) -def test_list_indexes( - transport: str = "grpc", request_type=datastore_admin.ListIndexesRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.ListIndexesRequest, dict,]) +def test_list_indexes(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1148,10 +1280,6 @@ def test_list_indexes( assert response.next_page_token == "next_page_token_value" -def test_list_indexes_from_dict(): - test_list_indexes(request_type=dict) - - def test_list_indexes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1204,8 +1332,10 @@ async def test_list_indexes_async_from_dict(): await test_list_indexes_async(request_type=dict) -def test_list_indexes_pager(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_indexes_pager(transport_name: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -1235,8 +1365,10 @@ def test_list_indexes_pager(): assert all(isinstance(i, index.Index) for i in results) -def test_list_indexes_pages(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_indexes_pages(transport_name: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -1343,6 +1475,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.DatastoreAdminGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -1861,7 +2010,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( @@ -1926,3 +2075,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport), + (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index acfffd844c02..fee5a408e71f 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,10 +38,10 @@ from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query from google.oauth2 import service_account -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.protobuf import wrappers_pb2 -from google.type import latlng_pb2 +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore import google.auth @@ -238,20 +238,20 @@ def test_datastore_client_client_options(client_class, transport_class, transpor # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -308,7 +308,7 @@ def test_datastore_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -385,6 +385,83 @@ def test_datastore_client_mtls_env_auto( ) +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient]) +@mock.patch.object( + DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) +) +@mock.patch.object( + DatastoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAsyncClient), +) +def test_datastore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
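# Observable behaviour pinned down by the cases above and below, summarized
# (a sketch of outcomes, not of the implementation):
#   GOOGLE_API_USE_CLIENT_CERTIFICATE=false -> (options.api_endpoint, None)
#   GOOGLE_API_USE_MTLS_ENDPOINT=never      -> (DEFAULT_ENDPOINT, None)
#   GOOGLE_API_USE_MTLS_ENDPOINT=always     -> (DEFAULT_MTLS_ENDPOINT, None)
#   auto, default client cert available     -> (DEFAULT_MTLS_ENDPOINT, cert)
#   auto, no default client cert            -> (DEFAULT_ENDPOINT, None)
api_endpoint, cert_source = DatastoreClient.get_mtls_endpoint_and_cert_source()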
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -403,7 +480,7 @@ def test_datastore_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -417,24 +494,26 @@ def test_datastore_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", grpc_helpers), ( DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_datastore_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -465,7 +544,72 @@ def test_datastore_client_client_options_from_dict(): ) -def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", grpc_helpers), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_datastore_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=None, + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [datastore.LookupRequest, dict,]) +def test_lookup(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -489,10 +633,6 @@ def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): assert isinstance(response, datastore.LookupResponse) -def test_lookup_from_dict(): - test_lookup(request_type=dict) - - def test_lookup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -668,7 +808,8 @@ async def test_lookup_flattened_error_async(): ) -def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryRequest): +@pytest.mark.parametrize("request_type", [datastore.RunQueryRequest, dict,]) +def test_run_query(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -692,10 +833,6 @@ def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryReque assert isinstance(response, datastore.RunQueryResponse) -def test_run_query_from_dict(): - test_run_query(request_type=dict) - - def test_run_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
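# Sketch of why the asserted channel options matter: -1 lifts gRPC's default
# message-size limits, which large Datastore batches can exceed (assumes
# application-default credentials are available, since none are passed):
from google.api_core import grpc_helpers
channel = grpc_helpers.create_channel(
    "datastore.googleapis.com:443",
    options=[
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)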
@@ -745,9 +882,8 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -def test_begin_transaction( - transport: str = "grpc", request_type=datastore.BeginTransactionRequest -): +@pytest.mark.parametrize("request_type", [datastore.BeginTransactionRequest, dict,]) +def test_begin_transaction(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -776,10 +912,6 @@ def test_begin_transaction( assert response.transaction == b"transaction_blob" -def test_begin_transaction_from_dict(): - test_begin_transaction(request_type=dict) - - def test_begin_transaction_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -906,7 +1038,8 @@ async def test_begin_transaction_flattened_error_async(): ) -def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): +@pytest.mark.parametrize("request_type", [datastore.CommitRequest, dict,]) +def test_commit(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -931,10 +1064,6 @@ def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): assert response.index_updates == 1389 -def test_commit_from_dict(): - test_commit(request_type=dict) - - def test_commit_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1141,7 +1270,8 @@ async def test_commit_flattened_error_async(): ) -def test_rollback(transport: str = "grpc", request_type=datastore.RollbackRequest): +@pytest.mark.parametrize("request_type", [datastore.RollbackRequest, dict,]) +def test_rollback(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1165,10 +1295,6 @@ def test_rollback(transport: str = "grpc", request_type=datastore.RollbackReques assert isinstance(response, datastore.RollbackResponse) -def test_rollback_from_dict(): - test_rollback(request_type=dict) - - def test_rollback_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1300,9 +1426,8 @@ async def test_rollback_flattened_error_async(): ) -def test_allocate_ids( - transport: str = "grpc", request_type=datastore.AllocateIdsRequest -): +@pytest.mark.parametrize("request_type", [datastore.AllocateIdsRequest, dict,]) +def test_allocate_ids(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1326,10 +1451,6 @@ def test_allocate_ids( assert isinstance(response, datastore.AllocateIdsResponse) -def test_allocate_ids_from_dict(): - test_allocate_ids(request_type=dict) - - def test_allocate_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1483,7 +1604,8 @@ async def test_allocate_ids_flattened_error_async(): ) -def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsRequest): +@pytest.mark.parametrize("request_type", [datastore.ReserveIdsRequest, dict,]) +def test_reserve_ids(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1507,10 +1629,6 @@ def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsR assert isinstance(response, datastore.ReserveIdsResponse) -def test_reserve_ids_from_dict(): - test_reserve_ids(request_type=dict) - - def test_reserve_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1684,6 +1802,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. transport = transports.DatastoreGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2155,7 +2290,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( @@ -2220,3 +2355,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DatastoreClient, transports.DatastoreGrpcTransport), + (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From 9ea5e092f6af309fde0702985f32f7590b1c64e1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Feb 2022 17:28:18 -0500 Subject: [PATCH 394/611] chore(main): release 2.5.0 (#272) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 19 +++++++++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md 
b/packages/google-cloud-datastore/CHANGELOG.md index 089d3abcd80f..0acb94a4dff9 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.5.0](https://github.com/googleapis/python-datastore/compare/v2.4.0...v2.5.0) (2022-02-26) + + +### Features + +* add api key support ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) +* define Datastore -> Firestore in Datastore mode migration long running operation metadata ([#270](https://github.com/googleapis/python-datastore/issues/270)) ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) + + +### Bug Fixes + +* **deps:** move libcst to extras ([#271](https://github.com/googleapis/python-datastore/issues/271)) ([d53fcce](https://github.com/googleapis/python-datastore/commit/d53fcce361d1585be9b0793fb6cc7fc4b27b07a7)) +* resolve DuplicateCredentialArgs error when using credentials_file ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) + + +### Documentation + +* add generated snippets ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) + ## [2.4.0](https://www.github.com/googleapis/python-datastore/compare/v2.3.0...v2.4.0) (2021-11-08) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index fe11624d91dd..5836d8051156 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.4.0" +__version__ = "2.5.0" From 5cf503b9047d71ed570062c20ec5338246c5a3f8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Mar 2022 12:00:41 +0000 Subject: [PATCH 395/611] chore(deps): update actions/setup-python action to v3 (#274) Source-Link: https://github.com/googleapis/synthtool/commit/571ee2c3b26182429eddcf115122ee545d7d3787 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 3 +-- packages/google-cloud-datastore/.github/workflows/docs.yml | 4 ++-- packages/google-cloud-datastore/.github/workflows/lint.yml | 2 +- .../google-cloud-datastore/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index b668c04d5d65..d9a55fa405e8 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 - + digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index f7b8344c4500..cca4e98bf236 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml index 1e8b05c3d7ff..f687324ef2eb 100644 --- a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index 074ee2504ca5..d3003e09e0c6 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install coverage From 86d2fba2342429ee9414be5ff1049d0d4f71b26b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 2 Mar 2022 17:08:55 +0100 Subject: [PATCH 396/611] chore(deps): update all dependencies to v3 (#275) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies to v3 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/workflows/mypy.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml index 5a0f0e090d69..f9f07f4de171 100644 --- a/packages/google-cloud-datastore/.github/workflows/mypy.yml +++ b/packages/google-cloud-datastore/.github/workflows/mypy.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.8" - name: Install nox From 6ddeabe0d357f5ed1cc15183e3282bac73b27c79 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 2 Mar 2022 19:48:49 -0500 
Subject: [PATCH 397/611] chore(deps): update actions/checkout action to v3 (#277) Source-Link: https://github.com/googleapis/synthtool/commit/ca879097772aeec2cbb971c3cea8ecc81522b68a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.github/workflows/docs.yml | 4 ++-- packages/google-cloud-datastore/.github/workflows/lint.yml | 2 +- .../google-cloud-datastore/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index d9a55fa405e8..480226ac08a9 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 + digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index cca4e98bf236..b46d7305d8cf 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml index f687324ef2eb..f512a4960beb 100644 --- a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index d3003e09e0c6..e87fe5b7b79a 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: python: ['3.6', '3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -37,7 +37,7 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: From 40593dd57566efb159ea8b695d86117f573f1ab0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 4 Mar 2022 11:52:37 -0500 Subject: [PATCH 398/611] fix(deps): require google-api-core>=1.31.5, >=2.3.2 (#278) fix(deps): require proto-plus>=1.15.0 --- packages/google-cloud-datastore/setup.py | 4 ++-- packages/google-cloud-datastore/testing/constraints-3.6.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 6d24d2ee9a7c..2bd2f0687aa5 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -32,12 +32,12 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.28.0, <3.0.0dev", + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.0, <3.0.0dev", - "proto-plus >= 1.4.0", + "proto-plus >= 1.15.0", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/packages/google-cloud-datastore/testing/constraints-3.6.txt b/packages/google-cloud-datastore/testing/constraints-3.6.txt index 1800ac45ef0c..64daec82612f 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.6.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.28.0 +google-api-core==1.31.5 google-cloud-core==1.4.0 -proto-plus==1.4.0 +proto-plus==1.15.0 libcst==0.2.5 From e616a99f71bbb2fc76286b3ca270b3021731157c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 00:36:13 +0000 Subject: [PATCH 399/611] chore(deps): update actions/download-artifact action to v3 (#280) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-datastore/.github/workflows/unittest.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 480226ac08a9..44c78f7cc12d 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
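# What the tightened google-api-core pin from patch 398 above admits, checked
# with the `packaging` library (illustrative only; `packaging` is not a
# dependency the patch adds):
from packaging.specifiers import SpecifierSet
spec = SpecifierSet(">=1.31.5,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0")
assert "2.3.2" in spec and "1.31.5" in spec
assert "2.2.1" not in spec and "2.3.0" not in spec  # excluded release range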
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index e87fe5b7b79a..e5be6edbd54d 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -26,7 +26,7 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage-artifacts path: .coverage-${{ matrix.python }} @@ -47,7 +47,7 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: coverage-artifacts path: .coverage-results/ From 22339f3f3ae17b40c61448e36972150676ff1772 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 11:14:00 -0500 Subject: [PATCH 400/611] chore(main): release 2.5.1 (#279) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 8 ++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 0acb94a4dff9..8064ab655c1a 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.5.1](https://github.com/googleapis/python-datastore/compare/v2.5.0...v2.5.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#278](https://github.com/googleapis/python-datastore/issues/278)) ([ac08eb1](https://github.com/googleapis/python-datastore/commit/ac08eb16221cac02d917800423e182ae462f3c39)) +* **deps:** require proto-plus>=1.15.0 ([ac08eb1](https://github.com/googleapis/python-datastore/commit/ac08eb16221cac02d917800423e182ae462f3c39)) + ## [2.5.0](https://github.com/googleapis/python-datastore/compare/v2.4.0...v2.5.0) (2022-02-26) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 5836d8051156..aa1cc6e9e184 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.5.0" +__version__ = "2.5.1" From cb0a88c43ab0244f1224786c2c0ed89c7296ab8e Mon Sep 17 00:00:00 2001 From: Han Date: Wed, 16 Mar 2022 06:40:22 -0400 Subject: [PATCH 401/611] feat: add api for IN/NOT_IN/NOT_EQUAL operators (#282) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add support for IN/NOT_IN/!= operator * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: fix tests * chore: fix tests Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/datastore/query.py | 11 +++++++---- .../google/cloud/datastore_v1/types/query.py | 3 +++ .../google-cloud-datastore/tests/unit/test_query.py | 8 +++++++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5e4f49376521..57c19205f7af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -86,6 +86,9 @@ class Query(object): "<": query_pb2.PropertyFilter.Operator.LESS_THAN, ">": query_pb2.PropertyFilter.Operator.GREATER_THAN, "=": query_pb2.PropertyFilter.Operator.EQUAL, + "!=": query_pb2.PropertyFilter.Operator.NOT_EQUAL, + "IN": query_pb2.PropertyFilter.Operator.IN, + "NOT_IN": query_pb2.PropertyFilter.Operator.NOT_IN, } """Mapping of operator strings and their protobuf equivalents.""" @@ -215,7 +218,7 @@ def add_filter(self, property_name, operator, value): where property is a property stored on the entity in the datastore and operator is one of ``OPERATORS`` - (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``): + (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``): .. testsetup:: query-filter @@ -235,7 +238,7 @@ def add_filter(self, property_name, operator, value): :param property_name: A property name. :type operator: str - :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``. + :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``. :type value: :class:`int`, :class:`str`, :class:`bool`, :class:`float`, :class:`NoneType`, @@ -252,7 +255,7 @@ def add_filter(self, property_name, operator, value): """ if self.OPERATORS.get(operator) is None: error_message = 'Invalid expression: "%s"' % (operator,) - choices_message = "Please use one of: =, <, <=, >, >=." + choices_message = "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." raise ValueError(error_message, choices_message) if property_name == "__key__" and not isinstance(value, Key): @@ -293,7 +296,7 @@ def key_filter(self, key, operator="="): :param key: The key to filter on. :type operator: str - :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``. + :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``. Defaults to ``=``. 
""" self.add_filter("__key__", operator, key) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 46147f057071..920d39b758b1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -260,7 +260,10 @@ class Operator(proto.Enum): GREATER_THAN = 3 GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 + IN = 6 + NOT_EQUAL = 9 HAS_ANCESTOR = 11 + NOT_IN = 13 property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) op = proto.Field(proto.ENUM, number=2, enum=Operator,) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 3cbd95b84155..1f250f4636bb 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -175,12 +175,18 @@ def test_query_add_filter_w_all_operators(): query.add_filter("lt_prop", "<", "val3") query.add_filter("gt_prop", ">", "val4") query.add_filter("eq_prop", "=", "val5") - assert len(query.filters) == 5 + query.add_filter("in_prop", "IN", ["val6"]) + query.add_filter("neq_prop", "!=", "val9") + query.add_filter("not_in_prop", "NOT_IN", ["val13"]) + assert len(query.filters) == 8 assert query.filters[0] == ("leq_prop", "<=", "val1") assert query.filters[1] == ("geq_prop", ">=", "val2") assert query.filters[2] == ("lt_prop", "<", "val3") assert query.filters[3] == ("gt_prop", ">", "val4") assert query.filters[4] == ("eq_prop", "=", "val5") + assert query.filters[5] == ("in_prop", "IN", ["val6"]) + assert query.filters[6] == ("neq_prop", "!=", "val9") + assert query.filters[7] == ("not_in_prop", "NOT_IN", ["val13"]) def test_query_add_filter_w_known_operator_and_entity(): From 3da64fcf8b9b20ab8393d6bc8949f73a7dbb53dd Mon Sep 17 00:00:00 2001 From: Han Date: Wed, 16 Mar 2022 14:01:29 -0400 Subject: [PATCH 402/611] chore(revert): revert add api for IN/NOT_IN/NOT_EQUAL operators (#286) This reverts commit be1164d05e50e6a25b6752cbfaeee06e06f8814d. --- .../google/cloud/datastore/query.py | 11 ++++------- .../google/cloud/datastore_v1/types/query.py | 3 --- .../google-cloud-datastore/tests/unit/test_query.py | 8 +------- 3 files changed, 5 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 57c19205f7af..5e4f49376521 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -86,9 +86,6 @@ class Query(object): "<": query_pb2.PropertyFilter.Operator.LESS_THAN, ">": query_pb2.PropertyFilter.Operator.GREATER_THAN, "=": query_pb2.PropertyFilter.Operator.EQUAL, - "!=": query_pb2.PropertyFilter.Operator.NOT_EQUAL, - "IN": query_pb2.PropertyFilter.Operator.IN, - "NOT_IN": query_pb2.PropertyFilter.Operator.NOT_IN, } """Mapping of operator strings and their protobuf equivalents.""" @@ -218,7 +215,7 @@ def add_filter(self, property_name, operator, value): where property is a property stored on the entity in the datastore and operator is one of ``OPERATORS`` - (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``): + (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``): .. 
testsetup:: query-filter @@ -238,7 +235,7 @@ def add_filter(self, property_name, operator, value): :param property_name: A property name. :type operator: str - :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``. + :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``. :type value: :class:`int`, :class:`str`, :class:`bool`, :class:`float`, :class:`NoneType`, @@ -255,7 +252,7 @@ def add_filter(self, property_name, operator, value): """ if self.OPERATORS.get(operator) is None: error_message = 'Invalid expression: "%s"' % (operator,) - choices_message = "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." + choices_message = "Please use one of: =, <, <=, >, >=." raise ValueError(error_message, choices_message) if property_name == "__key__" and not isinstance(value, Key): @@ -296,7 +293,7 @@ def key_filter(self, key, operator="="): :param key: The key to filter on. :type operator: str - :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``. + :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``. Defaults to ``=``. """ self.add_filter("__key__", operator, key) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 920d39b758b1..46147f057071 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -260,10 +260,7 @@ class Operator(proto.Enum): GREATER_THAN = 3 GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 - IN = 6 - NOT_EQUAL = 9 HAS_ANCESTOR = 11 - NOT_IN = 13 property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) op = proto.Field(proto.ENUM, number=2, enum=Operator,) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 1f250f4636bb..3cbd95b84155 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -175,18 +175,12 @@ def test_query_add_filter_w_all_operators(): query.add_filter("lt_prop", "<", "val3") query.add_filter("gt_prop", ">", "val4") query.add_filter("eq_prop", "=", "val5") - query.add_filter("in_prop", "IN", ["val6"]) - query.add_filter("neq_prop", "!=", "val9") - query.add_filter("not_in_prop", "NOT_IN", ["val13"]) - assert len(query.filters) == 8 + assert len(query.filters) == 5 assert query.filters[0] == ("leq_prop", "<=", "val1") assert query.filters[1] == ("geq_prop", ">=", "val2") assert query.filters[2] == ("lt_prop", "<", "val3") assert query.filters[3] == ("gt_prop", ">", "val4") assert query.filters[4] == ("eq_prop", "=", "val5") - assert query.filters[5] == ("in_prop", "IN", ["val6"]) - assert query.filters[6] == ("neq_prop", "!=", "val9") - assert query.filters[7] == ("not_in_prop", "NOT_IN", ["val13"]) def test_query_add_filter_w_known_operator_and_entity(): From ae31c4a3ce2c0f890856cb28f4b12a6080401ab1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 16 Mar 2022 15:09:30 -0400 Subject: [PATCH 403/611] chore: add IN/NOT_IN/NOT_EQUALS support to cloud datastore proto (#284) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add IN/NOT_IN/NOT_EQUALS support to cloud datastore proto PiperOrigin-RevId: 434824722 Source-Link: 
https://github.com/googleapis/googleapis/commit/4bfdcd371b9fac734d4b2ee8d8beae4f9f5e1aba Source-Link: https://github.com/googleapis/googleapis-gen/commit/5982b9b085388d6c9a90a5578ebe47518fe0932e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTk4MmI5YjA4NTM4OGQ2YzlhOTBhNTU3OGViZTQ3NTE4ZmUwOTMyZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google/cloud/datastore_v1/types/entity.py | 2 +- .../google/cloud/datastore_v1/types/query.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 1c432ee61218..62daa0e7564c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -189,7 +189,7 @@ class Value(proto.Message): This field is a member of `oneof`_ ``value_type``. string_value (str): A UTF-8 encoded string value. When ``exclude_from_indexes`` - is false (it is indexed), may have at most 1500 bytes. + is false (it is indexed) , may have at most 1500 bytes. Otherwise, may be set to at most 1,000,000 bytes. This field is a member of `oneof`_ ``value_type``. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 46147f057071..920d39b758b1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -260,7 +260,10 @@ class Operator(proto.Enum): GREATER_THAN = 3 GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 + IN = 6 + NOT_EQUAL = 9 HAS_ANCESTOR = 11 + NOT_IN = 13 property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) op = proto.Field(proto.ENUM, number=2, enum=Operator,) From 1fe3845f09388befbc4f2e2bfad0748a76720401 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Thu, 17 Mar 2022 02:40:10 -0700 Subject: [PATCH 404/611] chore: Change the Codeowner to cloud-native-db-dpes (#288) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Change the Codeowner to cloud-native-db-dpes * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/CODEOWNERS | 8 ++++---- packages/google-cloud-datastore/.repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS index 6116b8379e24..cac512401802 100644 --- a/packages/google-cloud-datastore/.github/CODEOWNERS +++ b/packages/google-cloud-datastore/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# @googleapis/yoshi-python @googleapis/firestore-dpe are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/firestore-dpe +# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes -# @googleapis/python-samples-reviewers @googleapis/firestore-dpe are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/firestore-dpe +# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index a5bf20b2c694..44c2f180f25b 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -11,6 +11,6 @@ "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com", "default_version": "v1", - "codeowner_team": "@googleapis/firestore-dpe", + "codeowner_team": "@googleapis/cloud-native-db-dpes", "api_shortname": "datastore" } From 8dacf14be7410d929cb61320bb9c4a4cd64f3ce9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 23:56:10 +0000 Subject: [PATCH 405/611] chore(python): use black==22.3.0 (#290) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- .../.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/docs/conf.py | 5 +- .../google/cloud/datastore/_http.py | 15 +- .../google/cloud/datastore/helpers.py | 5 +- .../google/cloud/datastore/key.py | 8 +- .../services/datastore_admin/async_client.py | 47 ++- .../services/datastore_admin/client.py | 88 ++++- .../datastore_admin/transports/base.py | 22 +- .../datastore_admin/transports/grpc.py | 3 +- .../types/datastore_admin.py | 248 ++++++++++--- .../cloud/datastore_admin_v1/types/index.py | 51 ++- .../datastore_admin_v1/types/migration.py | 25 +- .../services/datastore/async_client.py | 53 ++- .../datastore_v1/services/datastore/client.py | 94 ++++- .../services/datastore/transports/base.py | 26 +- .../services/datastore/transports/grpc.py | 3 +- .../cloud/datastore_v1/types/datastore.py | 234 ++++++++++--- .../google/cloud/datastore_v1/types/entity.py | 140 ++++++-- .../google/cloud/datastore_v1/types/query.py | 197 +++++++++-- packages/google-cloud-datastore/noxfile.py | 9 +- .../tests/system/conftest.py | 4 +- .../tests/system/test_allocate_reserve_ids.py | 3 +- .../tests/system/test_put.py | 21 +- .../tests/system/test_query.py | 21 +- .../tests/system/test_transaction.py | 11 +- .../tests/system/utils/populate_datastore.py | 18 +- .../test_datastore_admin.py | 305 ++++++++++++---- .../unit/gapic/datastore_v1/test_datastore.py | 330 +++++++++++++----- .../tests/unit/test__gapic.py | 4 +- .../tests/unit/test__http.py | 56 ++- .../tests/unit/test_client.py | 20 +- .../tests/unit/test_helpers.py | 38 +- .../tests/unit/test_key.py | 19 +- .../tests/unit/test_transaction.py | 4 +- 34 files changed, 1657 insertions(+), 472 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml 
b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 44c78f7cc12d..87dd00611576 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index d51558bef117..febe857a9da4 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index f92c76f05ef4..60b8af894767 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -53,7 +53,14 @@ def _make_request_pb(request, request_pb_type): def _request( - http, project, method, data, base_url, client_info, retry=None, timeout=None, + http, + project, + method, + data, + base_url, + client_info, + retry=None, + timeout=None, ): """Make a request over the Http transport to the Cloud Datastore API. 
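# This commit is mechanical: black 22.x's "magic trailing comma" keeps any
# call or literal that already ends in a comma exploded, one element per
# line. In miniature (hypothetical helper, for illustration only):
def make_key(kind, id_):
    return (kind, id_)

make_key("Kind", 123,)        # a pre-existing trailing comma ...
make_key(                     # ... makes black 22.3.0 keep the call exploded
    "Kind",
    123,
)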
@@ -103,7 +110,11 @@ def _request( if timeout is not None: response = requester( - url=api_url, method="POST", headers=headers, data=data, timeout=timeout, + url=api_url, + method="POST", + headers=headers, + data=data, + timeout=timeout, ) else: response = requester(url=api_url, method="POST", headers=headers, data=data) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 85dfc240cf19..f976070e9833 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -418,7 +418,10 @@ def _get_value_from_value_pb(pb): ] elif value_type == "geo_point_value": - result = GeoPoint(pb.geo_point_value.latitude, pb.geo_point_value.longitude,) + result = GeoPoint( + pb.geo_point_value.latitude, + pb.geo_point_value.longitude, + ) elif value_type == "null_value": result = None diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 76f18455c312..1a8e3645fc8e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -361,7 +361,7 @@ def from_legacy_urlsafe(cls, urlsafe): reference.ParseFromString(raw_bytes) project = _clean_app(reference.app) - namespace = _get_empty(reference.name_space, u"") + namespace = _get_empty(reference.name_space, "") _check_database_id(reference.database_id) flat_path = _get_flat_path(reference.path) return cls(*flat_path, project=project, namespace=namespace) @@ -554,7 +554,7 @@ def _check_database_id(database_id): :raises: :exc:`ValueError` if the ``database_id`` is not empty. """ - if database_id != u"": + if database_id != "": msg = _DATABASE_ID_TEMPLATE.format(database_id) raise ValueError(msg) @@ -580,13 +580,13 @@ def _add_id_or_name(flat_path, element_pb, empty_allowed): # NOTE: Below 0 and the empty string are the "null" values for their # respective types, indicating that the value is unset. if id_ == 0: - if name == u"": + if name == "": if not empty_allowed: raise ValueError(_EMPTY_ELEMENT) else: flat_path.append(name) else: - if name == u"": + if name == "": flat_path.append(id_) else: msg = _BAD_ELEMENT_TEMPLATE.format(id_, name) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index ebac62bd8569..c6cd885c1449 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -411,7 +411,12 @@ def sample_export_entities(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -574,7 +579,12 @@ def sample_import_entities(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation_async.from_gapic( @@ -665,7 +675,12 @@ def sample_create_index(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -755,7 +770,12 @@ def sample_delete_index(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -832,7 +852,12 @@ def sample_get_index(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -911,12 +936,20 @@ def sample_list_indexes(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListIndexesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 4f4f9211724f..74bf49c469a7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -59,7 +59,10 @@ class DatastoreAdminClientMeta(type): _transport_registry["grpc"] = DatastoreAdminGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[DatastoreAdminTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[DatastoreAdminTransport]: """Returns an appropriate transport class. 
Args: @@ -219,7 +222,9 @@ def transport(self) -> DatastoreAdminTransport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -232,9 +237,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -243,9 +252,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -254,9 +267,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -265,10 +282,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -590,7 +611,12 @@ def sample_export_entities(): rpc = self._transport._wrapped_methods[self._transport.export_entities] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -752,7 +778,12 @@ def sample_import_entities(): rpc = self._transport._wrapped_methods[self._transport.import_entities] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -844,7 +875,12 @@ def sample_create_index(): rpc = self._transport._wrapped_methods[self._transport.create_index] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
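# Orientation for the reformatted helpers above (behaviour is unchanged by
# this commit): they only build resource-name strings, e.g.
assert DatastoreAdminClient.common_project_path("my-proj") == "projects/my-proj"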
response = operation.from_gapic( @@ -935,7 +971,12 @@ def sample_delete_index(): rpc = self._transport._wrapped_methods[self._transport.delete_index] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1003,7 +1044,12 @@ def sample_get_index(): rpc = self._transport._wrapped_methods[self._transport.get_index] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1073,12 +1119,20 @@ def sample_list_indexes(): rpc = self._transport._wrapped_methods[self._transport.list_indexes] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListIndexesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 1b47ae2b17c5..daa2096f0ef9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -127,16 +127,24 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.export_entities: gapic_v1.method.wrap_method( - self.export_entities, default_timeout=60.0, client_info=client_info, + self.export_entities, + default_timeout=60.0, + client_info=client_info, ), self.import_entities: gapic_v1.method.wrap_method( - self.import_entities, default_timeout=60.0, client_info=client_info, + self.import_entities, + default_timeout=60.0, + client_info=client_info, ), self.create_index: gapic_v1.method.wrap_method( - self.create_index, default_timeout=60.0, client_info=client_info, + self.create_index, + default_timeout=60.0, + client_info=client_info, ), self.delete_index: gapic_v1.method.wrap_method( - self.delete_index, default_timeout=60.0, client_info=client_info, + self.delete_index, + default_timeout=60.0, + client_info=client_info, ), self.get_index: gapic_v1.method.wrap_method( self.get_index, @@ -173,9 +181,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index e27734f805b9..ba43c4b60555 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -285,8 +285,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 4e5ad0dafa35..a490fd9318cd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -83,11 +83,31 @@ class State(proto.Enum): FAILED = 6 CANCELLED = 7 - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - operation_type = proto.Field(proto.ENUM, number=3, enum="OperationType",) - labels = proto.MapField(proto.STRING, proto.STRING, number=4,) - state = proto.Field(proto.ENUM, number=5, enum=State,) + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + operation_type = proto.Field( + proto.ENUM, + number=3, + enum="OperationType", + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) class Progress(proto.Message): @@ -103,8 +123,14 @@ class Progress(proto.Message): unavailable. """ - work_completed = proto.Field(proto.INT64, number=1,) - work_estimated = proto.Field(proto.INT64, number=2,) + work_completed = proto.Field( + proto.INT64, + number=1, + ) + work_estimated = proto.Field( + proto.INT64, + number=2, + ) class ExportEntitiesRequest(proto.Message): @@ -145,10 +171,24 @@ class ExportEntitiesRequest(proto.Message): without conflict. """ - project_id = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) - entity_filter = proto.Field(proto.MESSAGE, number=3, message="EntityFilter",) - output_url_prefix = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=3, + message="EntityFilter", + ) + output_url_prefix = proto.Field( + proto.STRING, + number=4, + ) class ImportEntitiesRequest(proto.Message): @@ -185,10 +225,24 @@ class ImportEntitiesRequest(proto.Message): specified then all entities from the export are imported. 
""" - project_id = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) - input_url = proto.Field(proto.STRING, number=3,) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) + project_id = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + input_url = proto.Field( + proto.STRING, + number=3, + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) class ExportEntitiesResponse(proto.Message): @@ -204,7 +258,10 @@ class ExportEntitiesResponse(proto.Message): Only present if the operation completed successfully. """ - output_url = proto.Field(proto.STRING, number=1,) + output_url = proto.Field( + proto.STRING, + number=1, + ) class ExportEntitiesMetadata(proto.Message): @@ -230,11 +287,30 @@ class ExportEntitiesMetadata(proto.Message): [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - output_url_prefix = proto.Field(proto.STRING, number=5,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + progress_bytes = proto.Field( + proto.MESSAGE, + number=3, + message="Progress", + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) + output_url_prefix = proto.Field( + proto.STRING, + number=5, + ) class ImportEntitiesMetadata(proto.Message): @@ -259,11 +335,30 @@ class ImportEntitiesMetadata(proto.Message): field. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - input_url = proto.Field(proto.STRING, number=5,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + progress_bytes = proto.Field( + proto.MESSAGE, + number=3, + message="Progress", + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) + input_url = proto.Field( + proto.STRING, + number=5, + ) class EntityFilter(proto.Message): @@ -298,8 +393,14 @@ class EntityFilter(proto.Message): Each namespace in this list must be unique. """ - kinds = proto.RepeatedField(proto.STRING, number=1,) - namespace_ids = proto.RepeatedField(proto.STRING, number=2,) + kinds = proto.RepeatedField( + proto.STRING, + number=1, + ) + namespace_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) class CreateIndexRequest(proto.Message): @@ -316,8 +417,15 @@ class CreateIndexRequest(proto.Message): deleted. 
""" - project_id = proto.Field(proto.STRING, number=1,) - index = proto.Field(proto.MESSAGE, number=3, message=gda_index.Index,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index = proto.Field( + proto.MESSAGE, + number=3, + message=gda_index.Index, + ) class DeleteIndexRequest(proto.Message): @@ -331,8 +439,14 @@ class DeleteIndexRequest(proto.Message): The resource ID of the index to delete. """ - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class GetIndexRequest(proto.Message): @@ -346,8 +460,14 @@ class GetIndexRequest(proto.Message): The resource ID of the index to get. """ - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class ListIndexesRequest(proto.Message): @@ -367,10 +487,22 @@ class ListIndexesRequest(proto.Message): request, if any. """ - project_id = proto.Field(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=3,) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) class ListIndexesResponse(proto.Message): @@ -388,8 +520,15 @@ class ListIndexesResponse(proto.Message): def raw_page(self): return self - indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=gda_index.Index,) - next_page_token = proto.Field(proto.STRING, number=2,) + indexes = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gda_index.Index, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class IndexOperationMetadata(proto.Message): @@ -407,9 +546,20 @@ class IndexOperationMetadata(proto.Message): acting on. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - index_id = proto.Field(proto.STRING, number=3,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class DatastoreFirestoreMigrationMetadata(proto.Message): @@ -431,8 +581,16 @@ class DatastoreFirestoreMigrationMetadata(proto.Message): Datastore to Cloud Firestore in Datastore mode. 
""" - migration_state = proto.Field(proto.ENUM, number=1, enum=migration.MigrationState,) - migration_step = proto.Field(proto.ENUM, number=2, enum=migration.MigrationStep,) + migration_state = proto.Field( + proto.ENUM, + number=1, + enum=migration.MigrationState, + ) + migration_step = proto.Field( + proto.ENUM, + number=2, + enum=migration.MigrationStep, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index 8d50f03a5a3f..e00c3bb70800 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -16,7 +16,12 @@ import proto # type: ignore -__protobuf__ = proto.module(package="google.datastore.admin.v1", manifest={"Index",},) +__protobuf__ = proto.module( + package="google.datastore.admin.v1", + manifest={ + "Index", + }, +) class Index(proto.Message): @@ -73,15 +78,43 @@ class IndexedProperty(proto.Message): DIRECTION_UNSPECIFIED. """ - name = proto.Field(proto.STRING, number=1,) - direction = proto.Field(proto.ENUM, number=2, enum="Index.Direction",) + name = proto.Field( + proto.STRING, + number=1, + ) + direction = proto.Field( + proto.ENUM, + number=2, + enum="Index.Direction", + ) - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) - kind = proto.Field(proto.STRING, number=4,) - ancestor = proto.Field(proto.ENUM, number=5, enum=AncestorMode,) - properties = proto.RepeatedField(proto.MESSAGE, number=6, message=IndexedProperty,) - state = proto.Field(proto.ENUM, number=7, enum=State,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) + kind = proto.Field( + proto.STRING, + number=4, + ) + ancestor = proto.Field( + proto.ENUM, + number=5, + enum=AncestorMode, + ) + properties = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=IndexedProperty, + ) + state = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index 18cdd8d61051..97d4145f665d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -57,7 +57,11 @@ class MigrationStateEvent(proto.Message): The new state of the migration. 
""" - state = proto.Field(proto.ENUM, number=1, enum="MigrationState",) + state = proto.Field( + proto.ENUM, + number=1, + enum="MigrationState", + ) class MigrationProgressEvent(proto.Message): @@ -105,7 +109,9 @@ class PrepareStepDetails(proto.Message): """ concurrency_mode = proto.Field( - proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + proto.ENUM, + number=1, + enum="MigrationProgressEvent.ConcurrencyMode", ) class RedirectWritesStepDetails(proto.Message): @@ -117,12 +123,21 @@ class RedirectWritesStepDetails(proto.Message): """ concurrency_mode = proto.Field( - proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + proto.ENUM, + number=1, + enum="MigrationProgressEvent.ConcurrencyMode", ) - step = proto.Field(proto.ENUM, number=1, enum="MigrationStep",) + step = proto.Field( + proto.ENUM, + number=1, + enum="MigrationStep", + ) prepare_step_details = proto.Field( - proto.MESSAGE, number=2, oneof="step_details", message=PrepareStepDetails, + proto.MESSAGE, + number=2, + oneof="step_details", + message=PrepareStepDetails, ) redirect_writes_step_details = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index c6f8431bd383..247a388cbdac 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -309,7 +309,12 @@ def sample_lookup(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -381,7 +386,12 @@ def sample_run_query(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -465,7 +475,12 @@ def sample_begin_transaction(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -596,7 +611,12 @@ def sample_commit(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -692,7 +712,12 @@ def sample_rollback(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -790,7 +815,12 @@ def sample_allocate_ids(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -897,7 +927,12 @@ def sample_reserve_ids(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -911,7 +946,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 49c741dee835..de663367d06b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -54,7 +54,10 @@ class DatastoreClientMeta(type): _transport_registry["grpc"] = DatastoreGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[DatastoreTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[DatastoreTransport]: """Returns an appropriate transport class. Args: @@ -166,7 +169,9 @@ def transport(self) -> DatastoreTransport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -179,9 +184,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -190,9 +199,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -201,9 +214,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -212,10 +229,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: 
"""Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -486,7 +507,12 @@ def sample_lookup(): rpc = self._transport._wrapped_methods[self._transport.lookup] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -549,7 +575,12 @@ def sample_run_query(): rpc = self._transport._wrapped_methods[self._transport.run_query] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -633,7 +664,12 @@ def sample_begin_transaction(): rpc = self._transport._wrapped_methods[self._transport.begin_transaction] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -764,7 +800,12 @@ def sample_commit(): rpc = self._transport._wrapped_methods[self._transport.commit] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -860,7 +901,12 @@ def sample_rollback(): rpc = self._transport._wrapped_methods[self._transport.rollback] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -958,7 +1004,12 @@ def sample_allocate_ids(): rpc = self._transport._wrapped_methods[self._transport.allocate_ids] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1055,7 +1106,12 @@ def sample_reserve_ids(): rpc = self._transport._wrapped_methods[self._transport.reserve_ids] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1076,7 +1132,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 487a1a456813..b50c0fca7104 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -29,7 +29,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-datastore", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -152,16 +154,24 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, default_timeout=60.0, client_info=client_info, + self.begin_transaction, + default_timeout=60.0, + client_info=client_info, ), self.commit: gapic_v1.method.wrap_method( - self.commit, default_timeout=60.0, client_info=client_info, + self.commit, + default_timeout=60.0, + client_info=client_info, ), self.rollback: gapic_v1.method.wrap_method( - self.rollback, default_timeout=60.0, client_info=client_info, + self.rollback, + default_timeout=60.0, + client_info=client_info, ), self.allocate_ids: gapic_v1.method.wrap_method( - self.allocate_ids, default_timeout=60.0, client_info=client_info, + self.allocate_ids, + default_timeout=60.0, + client_info=client_info, ), self.reserve_ids: gapic_v1.method.wrap_method( self.reserve_ids, @@ -183,9 +193,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 410aa89d4a0f..d8a2f0018491 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -230,8 +230,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index e77ad1e9398b..d5d974c2eef9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -58,9 +58,20 @@ class LookupRequest(proto.Message): Required. Keys of entities to look up. """ - project_id = proto.Field(proto.STRING, number=8,) - read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) - keys = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + read_options = proto.Field( + proto.MESSAGE, + number=1, + message="ReadOptions", + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=entity.Key, + ) class LookupResponse(proto.Message): @@ -83,11 +94,21 @@ class LookupResponse(proto.Message): the order of the keys in the input. """ - found = proto.RepeatedField(proto.MESSAGE, number=1, message=gd_query.EntityResult,) + found = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gd_query.EntityResult, + ) missing = proto.RepeatedField( - proto.MESSAGE, number=2, message=gd_query.EntityResult, + proto.MESSAGE, + number=2, + message=gd_query.EntityResult, + ) + deferred = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=entity.Key, ) - deferred = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) class RunQueryRequest(proto.Message): @@ -123,14 +144,31 @@ class RunQueryRequest(proto.Message): This field is a member of `oneof`_ ``query_type``. """ - project_id = proto.Field(proto.STRING, number=8,) - partition_id = proto.Field(proto.MESSAGE, number=2, message=entity.PartitionId,) - read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) + project_id = proto.Field( + proto.STRING, + number=8, + ) + partition_id = proto.Field( + proto.MESSAGE, + number=2, + message=entity.PartitionId, + ) + read_options = proto.Field( + proto.MESSAGE, + number=1, + message="ReadOptions", + ) query = proto.Field( - proto.MESSAGE, number=3, oneof="query_type", message=gd_query.Query, + proto.MESSAGE, + number=3, + oneof="query_type", + message=gd_query.Query, ) gql_query = proto.Field( - proto.MESSAGE, number=7, oneof="query_type", message=gd_query.GqlQuery, + proto.MESSAGE, + number=7, + oneof="query_type", + message=gd_query.GqlQuery, ) @@ -146,8 +184,16 @@ class RunQueryResponse(proto.Message): was set. """ - batch = proto.Field(proto.MESSAGE, number=1, message=gd_query.QueryResultBatch,) - query = proto.Field(proto.MESSAGE, number=2, message=gd_query.Query,) + batch = proto.Field( + proto.MESSAGE, + number=1, + message=gd_query.QueryResultBatch, + ) + query = proto.Field( + proto.MESSAGE, + number=2, + message=gd_query.Query, + ) class BeginTransactionRequest(proto.Message): @@ -162,9 +208,14 @@ class BeginTransactionRequest(proto.Message): Options for a new transaction. """ - project_id = proto.Field(proto.STRING, number=8,) + project_id = proto.Field( + proto.STRING, + number=8, + ) transaction_options = proto.Field( - proto.MESSAGE, number=10, message="TransactionOptions", + proto.MESSAGE, + number=10, + message="TransactionOptions", ) @@ -177,7 +228,10 @@ class BeginTransactionResponse(proto.Message): The transaction identifier (always present). 
""" - transaction = proto.Field(proto.BYTES, number=1,) + transaction = proto.Field( + proto.BYTES, + number=1, + ) class RollbackRequest(proto.Message): @@ -193,8 +247,14 @@ class RollbackRequest(proto.Message): [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. """ - project_id = proto.Field(proto.STRING, number=8,) - transaction = proto.Field(proto.BYTES, number=1,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + transaction = proto.Field( + proto.BYTES, + number=1, + ) class RollbackResponse(proto.Message): @@ -248,10 +308,25 @@ class Mode(proto.Enum): TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 - project_id = proto.Field(proto.STRING, number=8,) - mode = proto.Field(proto.ENUM, number=5, enum=Mode,) - transaction = proto.Field(proto.BYTES, number=1, oneof="transaction_selector",) - mutations = proto.RepeatedField(proto.MESSAGE, number=6, message="Mutation",) + project_id = proto.Field( + proto.STRING, + number=8, + ) + mode = proto.Field( + proto.ENUM, + number=5, + enum=Mode, + ) + transaction = proto.Field( + proto.BYTES, + number=1, + oneof="transaction_selector", + ) + mutations = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Mutation", + ) class CommitResponse(proto.Message): @@ -269,9 +344,14 @@ class CommitResponse(proto.Message): """ mutation_results = proto.RepeatedField( - proto.MESSAGE, number=3, message="MutationResult", + proto.MESSAGE, + number=3, + message="MutationResult", + ) + index_updates = proto.Field( + proto.INT32, + number=4, ) - index_updates = proto.Field(proto.INT32, number=4,) class AllocateIdsRequest(proto.Message): @@ -288,8 +368,15 @@ class AllocateIdsRequest(proto.Message): reserved/read-only. """ - project_id = proto.Field(proto.STRING, number=8,) - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class AllocateIdsResponse(proto.Message): @@ -303,7 +390,11 @@ class AllocateIdsResponse(proto.Message): with a newly allocated ID. """ - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class ReserveIdsRequest(proto.Message): @@ -323,9 +414,19 @@ class ReserveIdsRequest(proto.Message): auto-allocated. 
""" - project_id = proto.Field(proto.STRING, number=8,) - database_id = proto.Field(proto.STRING, number=9,) - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + database_id = proto.Field( + proto.STRING, + number=9, + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class ReserveIdsResponse(proto.Message): @@ -380,19 +481,33 @@ class Mutation(proto.Message): """ insert = proto.Field( - proto.MESSAGE, number=4, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=4, + oneof="operation", + message=entity.Entity, ) update = proto.Field( - proto.MESSAGE, number=5, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=5, + oneof="operation", + message=entity.Entity, ) upsert = proto.Field( - proto.MESSAGE, number=6, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=6, + oneof="operation", + message=entity.Entity, ) delete = proto.Field( - proto.MESSAGE, number=7, oneof="operation", message=entity.Key, + proto.MESSAGE, + number=7, + oneof="operation", + message=entity.Key, ) base_version = proto.Field( - proto.INT64, number=8, oneof="conflict_detection_strategy", + proto.INT64, + number=8, + oneof="conflict_detection_strategy", ) @@ -418,9 +533,19 @@ class MutationResult(proto.Message): strategy field is not set in the mutation. """ - key = proto.Field(proto.MESSAGE, number=3, message=entity.Key,) - version = proto.Field(proto.INT64, number=4,) - conflict_detected = proto.Field(proto.BOOL, number=5,) + key = proto.Field( + proto.MESSAGE, + number=3, + message=entity.Key, + ) + version = proto.Field( + proto.INT64, + number=4, + ) + conflict_detected = proto.Field( + proto.BOOL, + number=5, + ) class ReadOptions(proto.Message): @@ -454,9 +579,16 @@ class ReadConsistency(proto.Enum): EVENTUAL = 2 read_consistency = proto.Field( - proto.ENUM, number=1, oneof="consistency_type", enum=ReadConsistency, + proto.ENUM, + number=1, + oneof="consistency_type", + enum=ReadConsistency, + ) + transaction = proto.Field( + proto.BYTES, + number=2, + oneof="consistency_type", ) - transaction = proto.Field(proto.BYTES, number=2, oneof="consistency_type",) class TransactionOptions(proto.Message): @@ -496,14 +628,26 @@ class ReadWrite(proto.Message): being retried. """ - previous_transaction = proto.Field(proto.BYTES, number=1,) + previous_transaction = proto.Field( + proto.BYTES, + number=1, + ) class ReadOnly(proto.Message): - r"""Options specific to read-only transactions. 
- """ + r"""Options specific to read-only transactions.""" - read_write = proto.Field(proto.MESSAGE, number=1, oneof="mode", message=ReadWrite,) - read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) + read_write = proto.Field( + proto.MESSAGE, + number=1, + oneof="mode", + message=ReadWrite, + ) + read_only = proto.Field( + proto.MESSAGE, + number=2, + oneof="mode", + message=ReadOnly, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 62daa0e7564c..a9371a9872b8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -22,7 +22,13 @@ __protobuf__ = proto.module( package="google.datastore.v1", - manifest={"PartitionId", "Key", "ArrayValue", "Value", "Entity",}, + manifest={ + "PartitionId", + "Key", + "ArrayValue", + "Value", + "Entity", + }, ) @@ -56,8 +62,14 @@ class PartitionId(proto.Message): which the entities belong. """ - project_id = proto.Field(proto.STRING, number=2,) - namespace_id = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=2, + ) + namespace_id = proto.Field( + proto.STRING, + number=4, + ) class Key(proto.Message): @@ -127,12 +139,31 @@ class PathElement(proto.Message): This field is a member of `oneof`_ ``id_type``. """ - kind = proto.Field(proto.STRING, number=1,) - id = proto.Field(proto.INT64, number=2, oneof="id_type",) - name = proto.Field(proto.STRING, number=3, oneof="id_type",) - - partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) + kind = proto.Field( + proto.STRING, + number=1, + ) + id = proto.Field( + proto.INT64, + number=2, + oneof="id_type", + ) + name = proto.Field( + proto.STRING, + number=3, + oneof="id_type", + ) + + partition_id = proto.Field( + proto.MESSAGE, + number=1, + message="PartitionId", + ) + path = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) class ArrayValue(proto.Message): @@ -145,7 +176,11 @@ class ArrayValue(proto.Message): 'exclude_from_indexes'. 
""" - values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",) + values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) class Value(proto.Message): @@ -226,28 +261,74 @@ class Value(proto.Message): """ null_value = proto.Field( - proto.ENUM, number=11, oneof="value_type", enum=struct_pb2.NullValue, + proto.ENUM, + number=11, + oneof="value_type", + enum=struct_pb2.NullValue, + ) + boolean_value = proto.Field( + proto.BOOL, + number=1, + oneof="value_type", + ) + integer_value = proto.Field( + proto.INT64, + number=2, + oneof="value_type", + ) + double_value = proto.Field( + proto.DOUBLE, + number=3, + oneof="value_type", ) - boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type",) - integer_value = proto.Field(proto.INT64, number=2, oneof="value_type",) - double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type",) timestamp_value = proto.Field( - proto.MESSAGE, number=10, oneof="value_type", message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=10, + oneof="value_type", + message=timestamp_pb2.Timestamp, + ) + key_value = proto.Field( + proto.MESSAGE, + number=5, + oneof="value_type", + message="Key", + ) + string_value = proto.Field( + proto.STRING, + number=17, + oneof="value_type", + ) + blob_value = proto.Field( + proto.BYTES, + number=18, + oneof="value_type", ) - key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message="Key",) - string_value = proto.Field(proto.STRING, number=17, oneof="value_type",) - blob_value = proto.Field(proto.BYTES, number=18, oneof="value_type",) geo_point_value = proto.Field( - proto.MESSAGE, number=8, oneof="value_type", message=latlng_pb2.LatLng, + proto.MESSAGE, + number=8, + oneof="value_type", + message=latlng_pb2.LatLng, ) entity_value = proto.Field( - proto.MESSAGE, number=6, oneof="value_type", message="Entity", + proto.MESSAGE, + number=6, + oneof="value_type", + message="Entity", ) array_value = proto.Field( - proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", + proto.MESSAGE, + number=9, + oneof="value_type", + message="ArrayValue", + ) + meaning = proto.Field( + proto.INT32, + number=14, + ) + exclude_from_indexes = proto.Field( + proto.BOOL, + number=19, ) - meaning = proto.Field(proto.INT32, number=14,) - exclude_from_indexes = proto.Field(proto.BOOL, number=19,) class Entity(proto.Message): @@ -273,8 +354,17 @@ class Entity(proto.Message): characters. The name cannot be ``""``. 
""" - key = proto.Field(proto.MESSAGE, number=1, message="Key",) - properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="Value",) + key = proto.Field( + proto.MESSAGE, + number=1, + message="Key", + ) + properties = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="Value", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 920d39b758b1..7c368c57a784 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -75,9 +75,19 @@ class ResultType(proto.Enum): PROJECTION = 2 KEY_ONLY = 3 - entity = proto.Field(proto.MESSAGE, number=1, message=gd_entity.Entity,) - version = proto.Field(proto.INT64, number=4,) - cursor = proto.Field(proto.BYTES, number=3,) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=gd_entity.Entity, + ) + version = proto.Field( + proto.INT64, + number=4, + ) + cursor = proto.Field( + proto.BYTES, + number=3, + ) class Query(proto.Message): @@ -122,17 +132,48 @@ class Query(proto.Message): Must be >= 0 if specified. """ - projection = proto.RepeatedField(proto.MESSAGE, number=2, message="Projection",) - kind = proto.RepeatedField(proto.MESSAGE, number=3, message="KindExpression",) - filter = proto.Field(proto.MESSAGE, number=4, message="Filter",) - order = proto.RepeatedField(proto.MESSAGE, number=5, message="PropertyOrder",) + projection = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Projection", + ) + kind = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="KindExpression", + ) + filter = proto.Field( + proto.MESSAGE, + number=4, + message="Filter", + ) + order = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="PropertyOrder", + ) distinct_on = proto.RepeatedField( - proto.MESSAGE, number=6, message="PropertyReference", + proto.MESSAGE, + number=6, + message="PropertyReference", + ) + start_cursor = proto.Field( + proto.BYTES, + number=7, + ) + end_cursor = proto.Field( + proto.BYTES, + number=8, + ) + offset = proto.Field( + proto.INT32, + number=10, + ) + limit = proto.Field( + proto.MESSAGE, + number=12, + message=wrappers_pb2.Int32Value, ) - start_cursor = proto.Field(proto.BYTES, number=7,) - end_cursor = proto.Field(proto.BYTES, number=8,) - offset = proto.Field(proto.INT32, number=10,) - limit = proto.Field(proto.MESSAGE, number=12, message=wrappers_pb2.Int32Value,) class KindExpression(proto.Message): @@ -143,7 +184,10 @@ class KindExpression(proto.Message): The name of the kind. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class PropertyReference(proto.Message): @@ -156,7 +200,10 @@ class PropertyReference(proto.Message): a property name path. """ - name = proto.Field(proto.STRING, number=2,) + name = proto.Field( + proto.STRING, + number=2, + ) class Projection(proto.Message): @@ -167,7 +214,11 @@ class Projection(proto.Message): The property to project. 
""" - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) class PropertyOrder(proto.Message): @@ -186,8 +237,16 @@ class Direction(proto.Enum): ASCENDING = 1 DESCENDING = 2 - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - direction = proto.Field(proto.ENUM, number=2, enum=Direction,) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + direction = proto.Field( + proto.ENUM, + number=2, + enum=Direction, + ) class Filter(proto.Message): @@ -212,10 +271,16 @@ class Filter(proto.Message): """ composite_filter = proto.Field( - proto.MESSAGE, number=1, oneof="filter_type", message="CompositeFilter", + proto.MESSAGE, + number=1, + oneof="filter_type", + message="CompositeFilter", ) property_filter = proto.Field( - proto.MESSAGE, number=2, oneof="filter_type", message="PropertyFilter", + proto.MESSAGE, + number=2, + oneof="filter_type", + message="PropertyFilter", ) @@ -236,8 +301,16 @@ class Operator(proto.Enum): OPERATOR_UNSPECIFIED = 0 AND = 1 - op = proto.Field(proto.ENUM, number=1, enum=Operator,) - filters = proto.RepeatedField(proto.MESSAGE, number=2, message="Filter",) + op = proto.Field( + proto.ENUM, + number=1, + enum=Operator, + ) + filters = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Filter", + ) class PropertyFilter(proto.Message): @@ -265,9 +338,21 @@ class Operator(proto.Enum): HAS_ANCESTOR = 11 NOT_IN = 13 - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - op = proto.Field(proto.ENUM, number=2, enum=Operator,) - value = proto.Field(proto.MESSAGE, number=3, message=gd_entity.Value,) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + op = proto.Field( + proto.ENUM, + number=2, + enum=Operator, + ) + value = proto.Field( + proto.MESSAGE, + number=3, + message=gd_entity.Value, + ) class GqlQuery(proto.Message): @@ -300,13 +385,24 @@ class GqlQuery(proto.Message): true. 
""" - query_string = proto.Field(proto.STRING, number=1,) - allow_literals = proto.Field(proto.BOOL, number=2,) + query_string = proto.Field( + proto.STRING, + number=1, + ) + allow_literals = proto.Field( + proto.BOOL, + number=2, + ) named_bindings = proto.MapField( - proto.STRING, proto.MESSAGE, number=5, message="GqlQueryParameter", + proto.STRING, + proto.MESSAGE, + number=5, + message="GqlQueryParameter", ) positional_bindings = proto.RepeatedField( - proto.MESSAGE, number=4, message="GqlQueryParameter", + proto.MESSAGE, + number=4, + message="GqlQueryParameter", ) @@ -333,9 +429,16 @@ class GqlQueryParameter(proto.Message): """ value = proto.Field( - proto.MESSAGE, number=2, oneof="parameter_type", message=gd_entity.Value, + proto.MESSAGE, + number=2, + oneof="parameter_type", + message=gd_entity.Value, + ) + cursor = proto.Field( + proto.BYTES, + number=3, + oneof="parameter_type", ) - cursor = proto.Field(proto.BYTES, number=3, oneof="parameter_type",) class QueryResultBatch(proto.Message): @@ -380,17 +483,37 @@ class MoreResultsType(proto.Enum): MORE_RESULTS_AFTER_CURSOR = 4 NO_MORE_RESULTS = 3 - skipped_results = proto.Field(proto.INT32, number=6,) - skipped_cursor = proto.Field(proto.BYTES, number=3,) + skipped_results = proto.Field( + proto.INT32, + number=6, + ) + skipped_cursor = proto.Field( + proto.BYTES, + number=3, + ) entity_result_type = proto.Field( - proto.ENUM, number=1, enum="EntityResult.ResultType", + proto.ENUM, + number=1, + enum="EntityResult.ResultType", ) entity_results = proto.RepeatedField( - proto.MESSAGE, number=2, message="EntityResult", + proto.MESSAGE, + number=2, + message="EntityResult", + ) + end_cursor = proto.Field( + proto.BYTES, + number=4, + ) + more_results = proto.Field( + proto.ENUM, + number=5, + enum=MoreResultsType, + ) + snapshot_version = proto.Field( + proto.INT64, + number=7, ) - end_cursor = proto.Field(proto.BYTES, number=4,) - more_results = proto.Field(proto.ENUM, number=5, enum=MoreResultsType,) - snapshot_version = proto.Field(proto.INT64, number=7,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index c8cc807033a1..b8b09a169b36 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -59,7 +59,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -69,7 +71,8 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/packages/google-cloud-datastore/tests/system/conftest.py b/packages/google-cloud-datastore/tests/system/conftest.py index 61f8c1f02309..b0547f83396a 100644 --- a/packages/google-cloud-datastore/tests/system/conftest.py +++ b/packages/google-cloud-datastore/tests/system/conftest.py @@ -34,7 +34,9 @@ def datastore_client(test_namespace): if _helpers.EMULATOR_DATASET is not None: http = requests.Session() # Un-authorized. 
return datastore.Client( - project=_helpers.EMULATOR_DATASET, namespace=test_namespace, _http=http, + project=_helpers.EMULATOR_DATASET, + namespace=test_namespace, + _http=http, ) else: return datastore.Client(namespace=test_namespace) diff --git a/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py b/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py index 8c40538f0bac..f934d067a769 100644 --- a/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py +++ b/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py @@ -18,7 +18,8 @@ def test_client_allocate_ids(datastore_client): num_ids = 10 allocated_keys = datastore_client.allocate_ids( - datastore_client.key("Kind"), num_ids, + datastore_client.key("Kind"), + num_ids, ) assert len(allocated_keys) == num_ids diff --git a/packages/google-cloud-datastore/tests/system/test_put.py b/packages/google-cloud-datastore/tests/system/test_put.py index 5e884cf3151a..2f8de3a06295 100644 --- a/packages/google-cloud-datastore/tests/system/test_put.py +++ b/packages/google-cloud-datastore/tests/system/test_put.py @@ -29,10 +29,10 @@ def parent_key(datastore_client): def _get_post(datastore_client, id_or_name=None, post_content=None): post_content = post_content or { - "title": u"How to make the perfect pizza in your grill", - "tags": [u"pizza", u"grill"], + "title": "How to make the perfect pizza in your grill", + "tags": ["pizza", "grill"], "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC), - "author": u"Silvano", + "author": "Silvano", "isDraft": False, "wordCount": 400, "rating": 5.0, @@ -77,15 +77,18 @@ def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete): entities_to_delete.append(entity1) second_post_content = { - "title": u"How to make the perfect homemade pasta", - "tags": [u"pasta", u"homemade"], + "title": "How to make the perfect homemade pasta", + "tags": ["pasta", "homemade"], "publishedAt": datetime.datetime(2001, 1, 1), - "author": u"Silvano", + "author": "Silvano", "isDraft": False, "wordCount": 450, "rating": 4.5, } - entity2 = _get_post(datastore_client, post_content=second_post_content,) + entity2 = _get_post( + datastore_client, + post_content=second_post_content, + ) xact.put(entity2) # Register entity to be deleted. entities_to_delete.append(entity2) @@ -111,7 +114,7 @@ def test_client_put_w_all_value_types(datastore_client, entities_to_delete): entity["truthy"] = True entity["float"] = 2.718281828 entity["int"] = 3735928559 - entity["words"] = u"foo" + entity["words"] = "foo" entity["blob"] = b"seekretz" entity_stored = datastore.Entity(key=key_stored) entity_stored["hi"] = "bye" @@ -133,7 +136,7 @@ def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_dele parent_key = datastore_client.key("Residence", "NewYork") key = datastore_client.key("Person", "name", parent=parent_key) entity = datastore.Entity(key=key) - entity["fullName"] = u"Full name" + entity["fullName"] = "Full name" entity["linkedTo"] = key # Self reference. 
datastore_client.put(entity) diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index c5921bc910f7..499bc507a5a3 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -301,7 +301,8 @@ def test_query_distinct_on(ancestor_query): @pytest.fixture(scope="session") def large_query_client(datastore_client): large_query_client = _helpers.clone_client( - datastore_client, namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, + datastore_client, + namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, ) # Populate the datastore if necessary. populate_datastore.add_large_character_entities(client=large_query_client) @@ -322,11 +323,23 @@ def large_query(large_query_client): "limit,offset,expected", [ # with no offset there are the correct # of results - (None, None, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS,), + ( + None, + None, + populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS, + ), # with no limit there are results (offset provided) - (None, 900, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900,), + ( + None, + 900, + populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900, + ), # Offset beyond items larger: verify 200 items found - (200, 1100, 200,), + ( + 200, + 1100, + 200, + ), # offset within range, expect 50 despite larger limit (100, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 50, 50), # Offset beyond items larger: verify no items found diff --git a/packages/google-cloud-datastore/tests/system/test_transaction.py b/packages/google-cloud-datastore/tests/system/test_transaction.py index d27bc43949d0..b380561f45c7 100644 --- a/packages/google-cloud-datastore/tests/system/test_transaction.py +++ b/packages/google-cloud-datastore/tests/system/test_transaction.py @@ -23,7 +23,7 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): key = datastore_client.key("Company", "Google") entity = datastore.Entity(key=key) - entity["url"] = u"www.google.com" + entity["url"] = "www.google.com" with datastore_client.transaction() as xact: result = datastore_client.get(entity.key) @@ -39,7 +39,8 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): def test_transaction_via_explicit_begin_get_commit( - datastore_client, entities_to_delete, + datastore_client, + entities_to_delete, ): # See # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 @@ -87,7 +88,7 @@ def test_failure_with_contention(datastore_client, entities_to_delete): # and updated outside it with a contentious value. key = local_client.key("BreakTxn", 1234) orig_entity = datastore.Entity(key=key) - orig_entity["foo"] = u"bar" + orig_entity["foo"] = "bar" local_client.put(orig_entity) entities_to_delete.append(orig_entity) @@ -97,10 +98,10 @@ def test_failure_with_contention(datastore_client, entities_to_delete): entity_in_txn = local_client.get(key) # Update the original entity outside the transaction. - orig_entity[contention_prop_name] = u"outside" + orig_entity[contention_prop_name] = "outside" datastore_client.put(orig_entity) # Try to update the entity which we already updated outside the # transaction.
- entity_in_txn[contention_prop_name] = u"inside" + entity_in_txn[contention_prop_name] = "inside" txn.put(entity_in_txn) diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 52f453f65b85..473950708e49 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -40,19 +40,19 @@ EDDARD + ("Character", "Jon Snow"), ) CHARACTERS = ( - {"name": u"Rickard", "family": u"Stark", "appearances": 0, "alive": False}, - {"name": u"Eddard", "family": u"Stark", "appearances": 9, "alive": False}, + {"name": "Rickard", "family": "Stark", "appearances": 0, "alive": False}, + {"name": "Eddard", "family": "Stark", "appearances": 9, "alive": False}, { - "name": u"Catelyn", - "family": [u"Stark", u"Tully"], + "name": "Catelyn", + "family": ["Stark", "Tully"], "appearances": 26, "alive": False, }, - {"name": u"Arya", "family": u"Stark", "appearances": 33, "alive": True}, - {"name": u"Sansa", "family": u"Stark", "appearances": 31, "alive": True}, - {"name": u"Robb", "family": u"Stark", "appearances": 22, "alive": False}, - {"name": u"Bran", "family": u"Stark", "appearances": 25, "alive": True}, - {"name": u"Jon Snow", "family": u"Stark", "appearances": 32, "alive": True}, + {"name": "Arya", "family": "Stark", "appearances": 33, "alive": True}, + {"name": "Sansa", "family": "Stark", "appearances": 31, "alive": True}, + {"name": "Robb", "family": "Stark", "appearances": 22, "alive": False}, + {"name": "Bran", "family": "Stark", "appearances": 25, "alive": True}, + {"name": "Jon Snow", "family": "Stark", "appearances": 32, "alive": True}, ) LARGE_CHARACTER_TOTAL_OBJECTS = 2500 LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity" diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index e6ed5508d9ff..7e678103e2db 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -95,7 +95,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] + "client_class", + [ + DatastoreAdminClient, + DatastoreAdminAsyncClient, + ], ) def test_datastore_admin_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -137,7 +141,11 @@ def test_datastore_admin_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] + "client_class", + [ + DatastoreAdminClient, + DatastoreAdminAsyncClient, + ], ) def test_datastore_admin_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -501,7 +509,9 @@ def test_datastore_admin_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -644,10 +654,17 @@ def test_datastore_admin_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [datastore_admin.ExportEntitiesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ExportEntitiesRequest, + dict, + ], +) def test_export_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -673,7 +690,8 @@ def test_export_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -689,7 +707,8 @@ async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -719,7 +738,9 @@ async def test_export_entities_async_from_dict(): def test_export_entities_flattened(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: @@ -753,7 +774,9 @@ def test_export_entities_flattened(): def test_export_entities_flattened_error(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -826,10 +849,17 @@ async def test_export_entities_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore_admin.ImportEntitiesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ImportEntitiesRequest, + dict, + ], +) def test_import_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -855,7 +885,8 @@ def test_import_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
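The exploded @pytest.mark.parametrize lists above also show what these GAPIC tests parametrize over: each RPC test runs twice, once with the generated proto request class and once with a plain dict, because client methods coerce dicts into the request type. Reduced to its essentials (FakeRequest is a hypothetical stand-in for a generated type such as datastore_admin.ExportEntitiesRequest):

    import pytest

    class FakeRequest:  # hypothetical stand-in for a generated request class
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

    @pytest.mark.parametrize("request_type", [FakeRequest, dict])
    def test_accepts_both(request_type):
        # Identical test body; only the construction of the request differs.
        request = request_type()
        assert request is not None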
client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -871,7 +902,8 @@ async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -901,7 +933,9 @@ async def test_import_entities_async_from_dict(): def test_import_entities_flattened(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: @@ -935,7 +969,9 @@ def test_import_entities_flattened(): def test_import_entities_flattened_error(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1008,10 +1044,17 @@ async def test_import_entities_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore_admin.CreateIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.CreateIndexRequest, + dict, + ], +) def test_create_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1037,7 +1080,8 @@ def test_create_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
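All of these RPC tests share one mechanic: the bound gRPC method on the transport is replaced by a mock, so nothing touches the network. A self-contained sketch of the pattern, assuming google-cloud-datastore and google-auth are installed (the operation name is an arbitrary placeholder, mirroring the tests above):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient
    from google.longrunning import operations_pb2

    client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.import_entities), "__call__") as call:
        # Fake the server response; the call never leaves the process.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        client.import_entities(request={})
        call.assert_called_once()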
@@ -1053,7 +1097,8 @@ async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1082,10 +1127,17 @@ async def test_create_index_async_from_dict(): await test_create_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.DeleteIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.DeleteIndexRequest, + dict, + ], +) def test_delete_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1111,7 +1163,8 @@ def test_delete_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1127,7 +1180,8 @@ async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1156,10 +1210,17 @@ async def test_delete_index_async_from_dict(): await test_delete_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.GetIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.GetIndexRequest, + dict, + ], +) def test_get_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1196,7 +1257,8 @@ def test_get_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1212,7 +1274,8 @@ async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1252,10 +1315,17 @@ async def test_get_index_async_from_dict(): await test_get_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.ListIndexesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ListIndexesRequest, + dict, + ], +) def test_list_indexes(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1284,7 +1354,8 @@ def test_list_indexes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1300,7 +1371,8 @@ async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1334,7 +1406,8 @@ async def test_list_indexes_async_from_dict(): def test_list_indexes_pager(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1342,15 +1415,28 @@ def test_list_indexes_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token="ghi", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1367,7 +1453,8 @@ def test_list_indexes_pager(transport_name: str = "grpc"): def test_list_indexes_pages(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1375,15 +1462,28 @@ def test_list_indexes_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
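The page fixtures being exploded above have a fixed shape: three indexes, then zero, then one, then two, chained by next_page_token "abc" -> "def" -> "ghi" -> "" and terminated by a RuntimeError sentinel. The pager under test is essentially a loop over those tokens; modeled generically (iterate_pages is an illustrative helper, not library code):

    def iterate_pages(fetch):
        """Yield items across pages; fetch(token) returns (items, next_token)."""
        token = ""
        while True:
            items, next_token = fetch(token)
            yield from items
            if not next_token:
                return
            token = next_token

    # Page shapes mirroring the fixture: 3 items, then 0, then 1, then 2.
    pages = {
        "": ([1, 2, 3], "abc"),
        "abc": ([], "def"),
        "def": ([4], "ghi"),
        "ghi": ([5, 6], ""),
    }
    assert list(iterate_pages(lambda token: pages[token])) == [1, 2, 3, 4, 5, 6]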
call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1394,7 +1494,9 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): - client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1403,19 +1505,34 @@ async def test_list_indexes_async_pager(): # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) - async_pager = await client.list_indexes(request={},) + async_pager = await client.list_indexes( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1427,7 +1544,9 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): - client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1436,15 +1555,28 @@ async def test_list_indexes_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1462,7 +1594,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1482,7 +1615,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = DatastoreAdminClient(client_options=options, transport=transport,) + client = DatastoreAdminClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1498,7 +1634,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreAdminClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1543,8 +1680,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatastoreAdminGrpcTransport,) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreAdminGrpcTransport, + ) def test_datastore_admin_base_transport_error(): @@ -1599,7 +1741,8 @@ def test_datastore_admin_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreAdminTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1769,7 +1912,8 @@ def test_datastore_admin_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreAdminGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1781,7 +1925,8 @@ def test_datastore_admin_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
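Behind the ValueError checks in test_credentials_transport_error is a simple invariant: a transport instance already carries its own credentials, so also passing credentials (or a credentials file, scopes, or an API key) to the client is ambiguous and rejected. In outline, using the same classes as the tests:

    import pytest

    from google.auth import credentials as ga_credentials
    from google.cloud.datastore_admin_v1 import DatastoreAdminClient
    from google.cloud.datastore_admin_v1.services.datastore_admin import transports

    transport = transports.DatastoreAdminGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        DatastoreAdminClient(
            credentials=ga_credentials.AnonymousCredentials(),  # conflicts with transport
            transport=transport,
        )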
transport = transports.DatastoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1890,12 +2035,16 @@ def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): def test_datastore_admin_grpc_lro_client(): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1903,12 +2052,16 @@ def test_datastore_admin_grpc_lro_client(): def test_datastore_admin_grpc_lro_async_client(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1936,7 +2089,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatastoreAdminClient.common_folder_path(folder) assert expected == actual @@ -1954,7 +2109,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatastoreAdminClient.common_organization_path(organization) assert expected == actual @@ -1972,7 +2129,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatastoreAdminClient.common_project_path(project) assert expected == actual @@ -1992,7 +2151,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatastoreAdminClient.common_location_path(project, location) assert expected == actual @@ -2017,7 +2177,8 @@ def test_client_with_default_client_info(): transports.DatastoreAdminTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2026,7 +2187,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = DatastoreAdminClient.get_transport_class() transport = 
transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2034,7 +2196,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index fee5a408e71f..445f96fadbb1 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -84,7 +84,13 @@ def test__get_default_mtls_endpoint(): assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + DatastoreClient, + DatastoreAsyncClient, + ], +) def test_datastore_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -124,7 +130,13 @@ def test_datastore_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + DatastoreClient, + DatastoreAsyncClient, + ], +) def test_datastore_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -477,7 +489,9 @@ def test_datastore_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -608,10 +622,17 @@ def test_datastore_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [datastore.LookupRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.LookupRequest, + dict, + ], +) def test_lookup(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -637,7 +658,8 @@ def test_lookup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
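test_datastore_client_from_service_account_info patches the google-auth factory so no real key material is required; the essential shape, assuming the library versions these tests target (the info dict is a placeholder):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud import datastore_v1
    from google.oauth2 import service_account

    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = ga_credentials.AnonymousCredentials()
        client = datastore_v1.DatastoreClient.from_service_account_info({"type": "fake"})
        factory.assert_called_once_with({"type": "fake"})
        assert client.transport._host == "datastore.googleapis.com:443"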
@@ -653,7 +675,8 @@ async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -683,7 +706,9 @@ async def test_lookup_async_from_dict(): def test_lookup_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -723,7 +748,9 @@ def test_lookup_flattened(): def test_lookup_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -744,7 +771,9 @@ def test_lookup_flattened_error(): @pytest.mark.asyncio async def test_lookup_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -789,7 +818,9 @@ async def test_lookup_flattened_async(): @pytest.mark.asyncio async def test_lookup_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -808,10 +839,17 @@ async def test_lookup_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.RunQueryRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunQueryRequest, + dict, + ], +) def test_run_query(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -837,7 +875,8 @@ def test_run_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -853,7 +892,8 @@ async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -882,10 +922,17 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore.BeginTransactionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.BeginTransactionRequest, + dict, + ], +) def test_begin_transaction(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -916,7 +963,8 @@ def test_begin_transaction_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -934,7 +982,8 @@ async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -947,7 +996,9 @@ async def test_begin_transaction_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.BeginTransactionResponse(transaction=b"transaction_blob",) + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) ) response = await client.begin_transaction(request) @@ -967,7 +1018,9 @@ async def test_begin_transaction_async_from_dict(): def test_begin_transaction_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -977,7 +1030,9 @@ def test_begin_transaction_flattened(): call.return_value = datastore.BeginTransactionResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.begin_transaction(project_id="project_id_value",) + client.begin_transaction( + project_id="project_id_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -989,19 +1044,24 @@ def test_begin_transaction_flattened(): def test_begin_transaction_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
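The *_empty_call tests lean on proto3 semantics: every field is optional at runtime, so a method invoked with no arguments must still send a well-formed, default-constructed request rather than None. Sketched for begin_transaction, following the same mock pattern as above:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.types import datastore

    client = datastore_v1.DatastoreClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
    )
    with mock.patch.object(type(client.transport.begin_transaction), "__call__") as call:
        call.return_value = datastore.BeginTransactionResponse()
        client.begin_transaction()
        # The stub received a default request object, not None.
        _, args, _ = call.mock_calls[0]
        assert args[0] == datastore.BeginTransactionRequest()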
with pytest.raises(ValueError): client.begin_transaction( - datastore.BeginTransactionRequest(), project_id="project_id_value", + datastore.BeginTransactionRequest(), + project_id="project_id_value", ) @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1015,7 +1075,9 @@ async def test_begin_transaction_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.begin_transaction(project_id="project_id_value",) + response = await client.begin_transaction( + project_id="project_id_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1028,20 +1090,30 @@ async def test_begin_transaction_flattened_async(): @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.begin_transaction( - datastore.BeginTransactionRequest(), project_id="project_id_value", + datastore.BeginTransactionRequest(), + project_id="project_id_value", ) -@pytest.mark.parametrize("request_type", [datastore.CommitRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.CommitRequest, + dict, + ], +) def test_commit(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1051,7 +1123,9 @@ def test_commit(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = datastore.CommitResponse(index_updates=1389,) + call.return_value = datastore.CommitResponse( + index_updates=1389, + ) response = client.commit(request) # Establish that the underlying gRPC stub method was called. @@ -1068,7 +1142,8 @@ def test_commit_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
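The *_flattened_error tests above pin down a companion invariant: an RPC may be invoked with a full request object or with flattened keyword fields, never both at once. In isolation:

    import pytest

    from google.auth import credentials as ga_credentials
    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.types import datastore

    client = datastore_v1.DatastoreClient(credentials=ga_credentials.AnonymousCredentials())
    with pytest.raises(ValueError):
        client.begin_transaction(
            datastore.BeginTransactionRequest(),  # full request object...
            project_id="project_id_value",  # ...plus a flattened field: rejected
        )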
@@ -1084,7 +1159,8 @@ async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1095,7 +1171,9 @@ async def test_commit_async( with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.CommitResponse(index_updates=1389,) + datastore.CommitResponse( + index_updates=1389, + ) ) response = await client.commit(request) @@ -1115,7 +1193,9 @@ async def test_commit_async_from_dict(): def test_commit_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1165,7 +1245,9 @@ def test_commit_flattened(): def test_commit_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1191,7 +1273,9 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1246,7 +1330,9 @@ async def test_commit_flattened_async(): @pytest.mark.asyncio async def test_commit_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1270,10 +1356,17 @@ async def test_commit_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.RollbackRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.RollbackRequest, + dict, + ], +) def test_rollback(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1299,7 +1392,8 @@ def test_rollback_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1315,7 +1409,8 @@ async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1345,7 +1440,9 @@ async def test_rollback_async_from_dict(): def test_rollback_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1354,7 +1451,8 @@ def test_rollback_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rollback( - project_id="project_id_value", transaction=b"transaction_blob", + project_id="project_id_value", + transaction=b"transaction_blob", ) # Establish that the underlying call was made with the expected @@ -1370,7 +1468,9 @@ def test_rollback_flattened(): def test_rollback_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1384,7 +1484,9 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1397,7 +1499,8 @@ async def test_rollback_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.rollback( - project_id="project_id_value", transaction=b"transaction_blob", + project_id="project_id_value", + transaction=b"transaction_blob", ) # Establish that the underlying call was made with the expected @@ -1414,7 +1517,9 @@ async def test_rollback_flattened_async(): @pytest.mark.asyncio async def test_rollback_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1426,10 +1531,17 @@ async def test_rollback_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.AllocateIdsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.AllocateIdsRequest, + dict, + ], +) def test_allocate_ids(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1455,7 +1567,8 @@ def test_allocate_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
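Conversely, the plain *_flattened tests confirm that keyword arguments land on the matching request fields. For rollback that reduces to:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.types import datastore

    client = datastore_v1.DatastoreClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.rollback), "__call__") as call:
        call.return_value = datastore.RollbackResponse()
        client.rollback(project_id="project_id_value", transaction=b"transaction_blob")
        _, args, _ = call.mock_calls[0]
        assert args[0].project_id == "project_id_value"
        assert args[0].transaction == b"transaction_blob"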
client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1471,7 +1584,8 @@ async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1501,7 +1615,9 @@ async def test_allocate_ids_async_from_dict(): def test_allocate_ids_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1533,7 +1649,9 @@ def test_allocate_ids_flattened(): def test_allocate_ids_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1551,7 +1669,9 @@ def test_allocate_ids_flattened_error(): @pytest.mark.asyncio async def test_allocate_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1588,7 +1708,9 @@ async def test_allocate_ids_flattened_async(): @pytest.mark.asyncio async def test_allocate_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1604,10 +1726,17 @@ async def test_allocate_ids_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.ReserveIdsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.ReserveIdsRequest, + dict, + ], +) def test_reserve_ids(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1633,7 +1762,8 @@ def test_reserve_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1649,7 +1779,8 @@ async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1679,7 +1810,9 @@ async def test_reserve_ids_async_from_dict(): def test_reserve_ids_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1711,7 +1844,9 @@ def test_reserve_ids_flattened(): def test_reserve_ids_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1729,7 +1864,9 @@ def test_reserve_ids_flattened_error(): @pytest.mark.asyncio async def test_reserve_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1766,7 +1903,9 @@ async def test_reserve_ids_flattened_async(): @pytest.mark.asyncio async def test_reserve_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1789,7 +1928,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1809,7 +1949,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = DatastoreClient(client_options=options, transport=transport,) + client = DatastoreClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1825,7 +1968,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1855,7 +1999,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1867,8 +2014,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
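test_transport_adc and test_datastore_transport_auth_adc cover the fallback path: a transport built without explicit credentials consults Application Default Credentials via google.auth.default. Minimal form, with the ADC lookup stubbed out:

    from unittest import mock

    import google.auth
    from google.auth import credentials as ga_credentials
    from google.cloud.datastore_v1.services.datastore import transports

    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.DatastoreGrpcTransport()  # no credentials given
        adc.assert_called_once()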
- client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatastoreGrpcTransport,) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreGrpcTransport, + ) def test_datastore_base_transport_error(): @@ -1919,7 +2071,8 @@ def test_datastore_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1960,7 +2113,10 @@ def test_datastore_auth_adc(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], ) def test_datastore_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use @@ -2083,7 +2239,8 @@ def test_datastore_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2095,7 +2252,8 @@ def test_datastore_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2216,7 +2374,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatastoreClient.common_folder_path(folder) assert expected == actual @@ -2234,7 +2394,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatastoreClient.common_organization_path(organization) assert expected == actual @@ -2252,7 +2414,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatastoreClient.common_project_path(project) assert expected == actual @@ -2272,7 +2436,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatastoreClient.common_location_path(project, location) assert expected == actual @@ -2297,7 +2462,8 @@ def test_client_with_default_client_info(): transports.DatastoreTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) 
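The common_*_path helpers exercised above are pure string templates, which is why the tests can assert exact literals. Using the same sample values as the hunks:

    from google.cloud.datastore_v1 import DatastoreClient

    assert DatastoreClient.common_project_path("cuttlefish") == "projects/cuttlefish"
    assert (
        DatastoreClient.common_location_path("winkle", "nautilus")
        == "projects/winkle/locations/nautilus"
    )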
prep.assert_called_once_with(client_info) @@ -2306,7 +2472,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = DatastoreClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2314,7 +2481,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py index e7f0b6903733..b72a68b56d39 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gapic.py +++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py @@ -48,7 +48,9 @@ def test_live_api(make_chan, mock_transport, mock_klass): mock_transport.assert_called_once_with(channel=mock.sentinel.channel) make_chan.assert_called_once_with( - mock.sentinel.credentials, DEFAULT_USER_AGENT, "datastore.googleapis.com:443", + mock.sentinel.credentials, + DEFAULT_USER_AGENT, + "datastore.googleapis.com:443", ) mock_klass.assert_called_once_with( diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index 67f28ffe0a75..a03397d5098e 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -240,7 +240,11 @@ def test_api_ctor(): def _lookup_single_helper( - read_consistency=None, transaction=None, empty=True, retry=None, timeout=None, + read_consistency=None, + transaction=None, + empty=True, + retry=None, + timeout=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -295,7 +299,11 @@ def _lookup_single_helper( uri = _build_expected_url(client._base_url, project, "lookup") request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.LookupRequest(), + retry=retry, + timeout=timeout, ) if retry is not None: @@ -336,7 +344,11 @@ def test_api_lookup_single_key_hit_w_timeout(): def _lookup_multiple_helper( - found=0, missing=0, deferred=0, retry=None, timeout=None, + found=0, + missing=0, + deferred=0, + retry=None, + timeout=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -399,7 +411,11 @@ def _lookup_multiple_helper( uri = _build_expected_url(client._base_url, project, "lookup") request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.LookupRequest(), + retry=retry, + timeout=timeout, ) assert list(request.keys) == [key_pb1._pb, key_pb2._pb] assert request.read_options == read_options._pb @@ -499,7 +515,11 @@ def _run_query_helper( uri = _build_expected_url(client._base_url, project, "runQuery") request = _verify_protobuf_call( - http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.RunQueryRequest(), + retry=retry, + 
timeout=timeout, ) assert request.partition_id == partition_id._pb assert request.query == query_pb._pb @@ -615,7 +635,7 @@ def _commit_helper(transaction=None, retry=None, timeout=None): insert = mutation.upsert insert.key.CopyFrom(key_pb._pb) value_pb = _new_value_pb(insert, "foo") - value_pb.string_value = u"Foo" + value_pb.string_value = "Foo" http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] @@ -647,7 +667,11 @@ def _commit_helper(transaction=None, retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "commit") request = _verify_protobuf_call( - http, uri, rq_class(), retry=retry, timeout=timeout, + http, + uri, + rq_class(), + retry=retry, + timeout=timeout, ) assert list(request.mutations) == [mutation] assert request.mode == mode @@ -709,7 +733,11 @@ def _rollback_helper(retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "rollback") request = _verify_protobuf_call( - http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.RollbackRequest(), + retry=retry, + timeout=timeout, ) assert request.transaction == transaction @@ -765,7 +793,11 @@ def _allocate_ids_helper(count=0, retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "allocateIds") request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.AllocateIdsRequest(), + retry=retry, + timeout=timeout, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): @@ -822,7 +854,11 @@ def _reserve_ids_helper(count=0, retry=None, timeout=None): uri = _build_expected_url(client._base_url, project, "reserveIds") request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.AllocateIdsRequest(), + retry=retry, + timeout=timeout, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index da253deb6517..51cddb6a9fd7 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -130,7 +130,8 @@ def test_client_ctor_w_implicit_inputs(): other = "other" patch1 = mock.patch( - "google.cloud.datastore.client._determine_default_project", return_value=other, + "google.cloud.datastore.client._determine_default_project", + return_value=other, ) creds = _make_credentials() @@ -151,7 +152,9 @@ def test_client_ctor_w_implicit_inputs(): assert client.current_batch is None assert client.current_transaction is None - default.assert_called_once_with(scopes=Client.SCOPE,) + default.assert_called_once_with( + scopes=Client.SCOPE, + ) _determine_default_project.assert_called_once_with(None) @@ -258,7 +261,10 @@ def test_client_base_url_property_w_client_options(): creds = _make_credentials() client_options = {"api_endpoint": "endpoint"} - client = _make_client(credentials=creds, client_options=client_options,) + client = _make_client( + credentials=creds, + client_options=client_options, + ) assert client.base_url == "endpoint" client.base_url = alternate_url @@ -784,7 +790,7 @@ def test_client_put_multi_w_single_empty_entity(): def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): from 
google.cloud.datastore_v1.types import datastore as datastore_pb2 - entity = _Entity(foo=u"bar") + entity = _Entity(foo="bar") key = entity.key = _Key(_Key.kind, None) retry = mock.Mock() timeout = 100000 @@ -817,13 +823,13 @@ def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): assert len(prop_list) == 1 name, value_pb = prop_list[0] assert name == "foo" - assert value_pb.string_value == u"bar" + assert value_pb.string_value == "bar" def test_client_put_multi_existing_batch_w_completed_key(): creds = _make_credentials() client = _make_client(credentials=creds) - entity = _Entity(foo=u"bar") + entity = _Entity(foo="bar") key = entity.key = _Key() with _NoCommitBatch(client) as CURR_BATCH: @@ -837,7 +843,7 @@ def test_client_put_multi_existing_batch_w_completed_key(): assert len(prop_list) == 1 name, value_pb = prop_list[0] assert name == "foo" - assert value_pb.string_value == u"bar" + assert value_pb.string_value == "bar" def test_client_delete(): diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 4c1861a2537f..a8477f2de1fa 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -131,7 +131,7 @@ def test_entity_from_protobuf_w_entity_with_meaning(): name = "hello" value_pb = _new_value_pb(entity_pb, name) value_pb.meaning = meaning = 9 - value_pb.string_value = val = u"something" + value_pb.string_value = val = "something" entity = entity_from_protobuf(entity_pb) assert entity.key is None @@ -249,7 +249,7 @@ def test_enity_to_protobf_w_simple_fields(): name1 = "foo" entity[name1] = value1 = 42 name2 = "bar" - entity[name2] = value2 = u"some-string" + entity[name2] = value2 = "some-string" entity_pb = entity_to_protobuf(entity) expected_pb = entity_pb2.Entity() @@ -299,7 +299,7 @@ def test_enity_to_protobf_w_inverts_to_protobuf(): val_pb1.exclude_from_indexes = True # Add a string property. val_pb2 = _new_value_pb(original_pb, "bar") - val_pb2.string_value = u"hello" + val_pb2.string_value = "hello" # Add a nested (entity) property. 
val_pb3 = _new_value_pb(original_pb, "entity-baz") @@ -386,7 +386,7 @@ def test_enity_to_protobf_w_dict_to_entity(): from google.cloud.datastore.helpers import entity_to_protobuf entity = Entity() - entity["a"] = {"b": u"c"} + entity["a"] = {"b": "c"} entity_pb = entity_to_protobuf(entity) expected_pb = entity_pb2.Entity( @@ -624,9 +624,9 @@ def test__pb_attr_value_w_bytes(): def test__pb_attr_value_w_unicode(): from google.cloud.datastore.helpers import _pb_attr_value - name, value = _pb_attr_value(u"str") + name, value = _pb_attr_value("str") assert name == "string_value" - assert value == u"str" + assert value == "str" def test__pb_attr_value_w_entity(): @@ -758,8 +758,8 @@ def test__get_value_from_value_pb_w_bytes(): def test__get_value_from_value_pb_w_unicode(): from google.cloud.datastore.helpers import _get_value_from_value_pb - value = _make_value_pb("string_value", u"str") - assert _get_value_from_value_pb(value._pb) == u"str" + value = _make_value_pb("string_value", "str") + assert _get_value_from_value_pb(value._pb) == "str" def test__get_value_from_value_pb_w_entity(): @@ -929,9 +929,9 @@ def test__set_protobuf_value_w_unicode(): from google.cloud.datastore.helpers import _set_protobuf_value pb = _make_empty_value_pb() - _set_protobuf_value(pb, u"str") + _set_protobuf_value(pb, "str") value = pb.string_value - assert value == u"str" + assert value == "str" def test__set_protobuf_value_w_entity_empty_wo_key(): @@ -952,7 +952,7 @@ def test__set_protobuf_value_w_entity_w_key(): from google.cloud.datastore.helpers import _set_protobuf_value name = "foo" - value = u"Foo" + value = "Foo" pb = _make_empty_value_pb() key = Key("KIND", 123, project="PROJECT") entity = Entity(key=key) @@ -971,7 +971,7 @@ def test__set_protobuf_value_w_array(): from google.cloud.datastore.helpers import _set_protobuf_value pb = _make_empty_value_pb() - values = [u"a", 0, 3.14] + values = ["a", 0, 3.14] _set_protobuf_value(pb, values) marshalled = pb.array_value.values assert len(marshalled) == len(values) @@ -1009,7 +1009,7 @@ def test__get_meaning_w_single(): value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 - value_pb.string_value = u"hi" + value_pb.string_value = "hi" result = _get_meaning(value_pb) assert meaning == result @@ -1036,8 +1036,8 @@ def test__get_meaning_w_array_value(): sub_value_pb2 = value_pb._pb.array_value.values.add() sub_value_pb1.meaning = sub_value_pb2.meaning = meaning - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert meaning == result @@ -1055,8 +1055,8 @@ def test__get_meaning_w_array_value_multiple_meanings(): sub_value_pb1.meaning = meaning1 sub_value_pb2.meaning = meaning2 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert result == [meaning1, meaning2] @@ -1072,8 +1072,8 @@ def test__get_meaning_w_array_value_meaning_partially_unset(): sub_value_pb2 = value_pb._pb.array_value.values.add() sub_value_pb1.meaning = meaning1 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert result == [meaning1, None] diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py 
index 2d2a88e76035..575601f0b4f3 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -72,7 +72,10 @@ def test_key_ctor_parent(): {"kind": _CHILD_KIND, "id": _CHILD_ID}, ] parent_key = _make_key( - _PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, namespace=_PARENT_NAMESPACE, + _PARENT_KIND, + _PARENT_ID, + project=_PARENT_PROJECT, + namespace=_PARENT_NAMESPACE, ) key = _make_key(_CHILD_KIND, _CHILD_ID, parent=parent_key) assert key.project == parent_key.project @@ -97,7 +100,11 @@ def test_key_ctor_parent_bad_namespace(): parent_key = _make_key("KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT) with pytest.raises(ValueError): _make_key( - "KIND2", 1234, namespace="BAR", parent=parent_key, PROJECT=_DEFAULT_PROJECT, + "KIND2", + 1234, + namespace="BAR", + parent=parent_key, + PROJECT=_DEFAULT_PROJECT, ) @@ -585,7 +592,7 @@ def test__cliean_app_w_dev_server(): def test__get_empty_w_unset(): from google.cloud.datastore.key import _get_empty - for empty_value in (u"", 0, 0.0, []): + for empty_value in ("", 0, 0.0, []): ret_val = _get_empty(empty_value, empty_value) assert ret_val is None @@ -593,7 +600,7 @@ def test__get_empty_w_unset(): def test__get_empty_w_actually_set(): from google.cloud.datastore.key import _get_empty - value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) + value_pairs = (("hello", ""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) for value, empty_value in value_pairs: ret_val = _get_empty(value, empty_value) assert ret_val is value @@ -602,7 +609,7 @@ def test__get_empty_w_actually_set(): def test__check_database_id_w_empty_value(): from google.cloud.datastore.key import _check_database_id - ret_val = _check_database_id(u"") + ret_val = _check_database_id("") # Really we are just happy there was no exception. 
assert ret_val is None @@ -611,7 +618,7 @@ def test__check_database_id_w_failure(): from google.cloud.datastore.key import _check_database_id with pytest.raises(ValueError): - _check_database_id(u"some-database-id") + _check_database_id("some-database-id") def test__add_id_or_name_add_id(): diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 648ae7e450fe..3e78a6a31ac3 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -144,7 +144,9 @@ def test_transaction_begin_w_retry_w_timeout(): expected_request = _make_begin_request(project) ds_api.begin_transaction.assert_called_once_with( - request=expected_request, retry=retry, timeout=timeout, + request=expected_request, + retry=retry, + timeout=timeout, ) From 483d1d418d2ae3de3629c8489931e28ee791e73b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 16:48:25 +0000 Subject: [PATCH 406/611] chore(python): add E231 to .flake8 ignore list (#291) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- packages/google-cloud-datastore/.flake8 | 2 +- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 29227d4cf419..2e438749863d 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 87dd00611576..9e0a9356b6eb 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
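
The test churn in the hunks above is mechanical rather than semantic: black's "magic trailing comma" keeps any signature, call, or literal that already ends in a trailing comma exploded onto one element per line, and the redundant Python 2 u"" string prefixes are dropped along the way. A minimal before/after sketch of the trailing-comma rule, using one of the helpers reformatted above:

    # Written with a trailing comma, even though it fits on one line ...
    def _lookup_single_helper(read_consistency=None, transaction=None, empty=True,):
        ...

    # ... black treats that comma as "magic" and explodes the signature,
    # one parameter per line, instead of collapsing it:
    def _lookup_single_helper(
        read_consistency=None,
        transaction=None,
        empty=True,
    ):
        ...

The .flake8 change above adds E231 (pycodestyle's "missing whitespace after ',', ';' or ':'") to the ignore list, presumably so flake8 stops second-guessing layout that black now owns.
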
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From a073d2e796604f5c13cadfb7ea8d48660351ed51 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 00:22:25 +0000 Subject: [PATCH 407/611] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#292) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-datastore/.pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 9e0a9356b6eb..22cc254afa2c 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml index 62eb5a77d9a3..46d237160f6d 100644 --- a/packages/google-cloud-datastore/.pre-commit-config.yaml +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From 122888aca504865006da633c94e62188c3ace2e5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 02:10:30 +0000 Subject: [PATCH 408/611] chore(python): Enable size-label bot (#293) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.github/auto-label.yaml | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-datastore/.github/auto-label.yaml diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 22cc254afa2c..58a0b153bf0e 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/packages/google-cloud-datastore/.github/auto-label.yaml b/packages/google-cloud-datastore/.github/auto-label.yaml new file mode 100644 index 000000000000..09c8d735b456 --- /dev/null +++ b/packages/google-cloud-datastore/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true From eeceb759eaa56c142f11109e9853667061b9b092 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:40:21 +0000 Subject: [PATCH 409/611] chore(python): refactor unit / system test dependency install (#294) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-datastore/noxfile.py | 105 ++++++++++++++---- 2 files changed, 87 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 58a0b153bf0e..fa5762290c5b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index b8b09a169b36..975a93e36baa 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -20,16 +20,40 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -93,23 +117,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + 
"'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -133,6 +175,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) @nox.parametrize("disable_grpc", [False, True]) def system(session, disable_grpc): @@ -156,13 +227,7 @@ def system(session, disable_grpc): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. 
- session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) env = {} if disable_grpc: From 13116e39bf27af05628d996dc1c66c9800cb5de0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Apr 2022 12:37:53 -0400 Subject: [PATCH 410/611] chore: allow releases on previous major versions (#295) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: allow releases on previous major versions * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google-cloud-datastore/.github/release-please.yml | 9 +++++++++ packages/google-cloud-datastore/owlbot.py | 2 ++ 2 files changed, 11 insertions(+) diff --git a/packages/google-cloud-datastore/.github/release-please.yml b/packages/google-cloud-datastore/.github/release-please.yml index 466597e5b196..29601ad4692c 100644 --- a/packages/google-cloud-datastore/.github/release-please.yml +++ b/packages/google-cloud-datastore/.github/release-please.yml @@ -1,2 +1,11 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v1 + handleGHRelease: true + releaseType: python +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index fbf8c1312364..63214a43745e 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -107,6 +107,8 @@ def get_staging_dirs( python.py_samples(skip_readmes=True) +python.configure_previous_major_version_branches() + # Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) assert 1 == s.replace( "noxfile.py", From 5e13ef1e73f5aaa8a9884f54346195fcddbee481 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 06:59:02 -0400 Subject: [PATCH 411/611] chore(python): add license header to auto-label.yaml (#297) Source-Link: https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-datastore/.github/auto-label.yaml | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index fa5762290c5b..bc893c979e20 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/packages/google-cloud-datastore/.github/auto-label.yaml b/packages/google-cloud-datastore/.github/auto-label.yaml index 09c8d735b456..41bff0b5375a 100644 --- a/packages/google-cloud-datastore/.github/auto-label.yaml +++ b/packages/google-cloud-datastore/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. requestsize: enabled: true From 607ab6da84652a4aaebd7d846e597e5805ed46c1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Apr 2022 23:50:12 +0000 Subject: [PATCH 412/611] chore: Use gapic-generator-python 0.65.0 (#300) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 440970084 Source-Link: https://github.com/googleapis/googleapis/commit/5e0a3d57254ab9857ccac77fc6ffade7b69a2dc7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b0c628a3fade768f225d76992791ea1ba2a881be Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjBjNjI4YTNmYWRlNzY4ZjIyNWQ3Njk5Mjc5MWVhMWJhMmE4ODFiZSJ9 feat: expose new read_time API fields, currently only available in private preview docs: fix type in docstring for map fields PiperOrigin-RevId: 440914241 Source-Link: https://github.com/googleapis/googleapis/commit/0ed730f27474890a727a72bdc85e6d20715e2f87 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2e5ae97fd24f64af0fef1999dad14945fdc3663 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJlNWFlOTdmZDI0ZjY0YWYwZmVmMTk5OWRhZDE0OTQ1ZmRjMzY2MyJ9 --- .../services/datastore_admin/async_client.py | 21 ++--- .../services/datastore_admin/client.py | 15 ++-- .../datastore_admin/transports/base.py | 11 ++- .../datastore_admin/transports/grpc.py | 4 + .../datastore_admin_v1/types/__init__.py | 4 +- .../types/datastore_admin.py | 6 +- .../services/datastore/async_client.py | 15 ++-- .../datastore_v1/services/datastore/client.py | 6 +- .../services/datastore/transports/base.py | 14 ++-- .../services/datastore/transports/grpc.py | 4 + .../cloud/datastore_v1/types/datastore.py | 69 +++++++++++++++- .../google/cloud/datastore_v1/types/entity.py | 2 +- .../google/cloud/datastore_v1/types/query.py | 30 ++++++- .../test_datastore_admin.py | 81 ++++++++++++++----- .../unit/gapic/datastore_v1/test_datastore.py | 75 +++++++++++++---- 15 files changed, 266 insertions(+), 91 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index c6cd885c1449..122d1fe56b13 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -267,7 +267,7 @@ async def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -285,7 +285,6 @@ async def export_entities( before completion it may leave partial data behind in Google Cloud Storage. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -321,7 +320,7 @@ def sample_export_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Dict[str, str]`): + labels (:class:`Mapping[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -434,7 +433,7 @@ async def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -449,7 +448,6 @@ async def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -485,7 +483,7 @@ def sample_import_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Dict[str, str]`): + labels (:class:`Mapping[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -621,7 +619,6 @@ async def create_index( Indexes with a single property cannot be created. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -716,7 +713,6 @@ async def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -842,8 +838,7 @@ def sample_get_index(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -875,7 +870,6 @@ async def list_indexes( the list of indexes and may occasionally return stale results. - .. 
code-block:: python from google.cloud import datastore_admin_v1 @@ -926,8 +920,7 @@ def sample_list_indexes(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 74bf49c469a7..8f5364a74505 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -468,7 +468,7 @@ def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -486,7 +486,6 @@ def export_entities( before completion it may leave partial data behind in Google Cloud Storage. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -522,7 +521,7 @@ def sample_export_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -634,7 +633,7 @@ def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Dict[str, str] = None, + labels: Mapping[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -649,7 +648,6 @@ def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -685,7 +683,7 @@ def sample_import_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -820,7 +818,6 @@ def create_index( Indexes with a single property cannot be created. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -916,7 +913,6 @@ def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. - .. code-block:: python from google.cloud import datastore_admin_v1 @@ -1067,7 +1063,6 @@ def list_indexes( the list of indexes and may occasionally return stale results. - .. 
code-block:: python from google.cloud import datastore_admin_v1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index daa2096f0ef9..0cf9ac647439 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -86,6 +86,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -153,8 +154,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -168,8 +168,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -248,5 +247,9 @@ def list_indexes( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DatastoreAdminTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index ba43c4b60555..e41933662adc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -506,5 +506,9 @@ def list_indexes( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DatastoreAdminGrpcTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py index fbc4f65f995b..f194f3cf3208 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -31,7 +31,9 @@ Progress, OperationType, ) -from .index import Index +from .index import ( + Index, +) from .migration import ( MigrationProgressEvent, MigrationStateEvent, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index a490fd9318cd..82bacec17632 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -64,7 +64,7 @@ class CommonMetadata(proto.Message): operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. - labels (Dict[str, str]): + labels (Mapping[str, str]): The client-assigned labels which were provided when the operation was created. 
May also include additional labels. @@ -141,7 +141,7 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is @@ -199,7 +199,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 247a388cbdac..a4c415435de4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -34,6 +34,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport from .client import DatastoreClient @@ -299,8 +300,7 @@ def sample_lookup(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -376,8 +376,7 @@ def sample_run_query(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -500,7 +499,6 @@ async def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. - .. code-block:: python from google.cloud import datastore_v1 @@ -735,7 +733,6 @@ async def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. - .. code-block:: python from google.cloud import datastore_v1 @@ -838,7 +835,6 @@ async def reserve_ids( r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. - .. 
code-block:: python from google.cloud import datastore_v1 @@ -917,8 +913,7 @@ def sample_reserve_ids(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index de663367d06b..5b012a2f31f4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -37,6 +37,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreGrpcTransport from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport @@ -689,7 +690,6 @@ def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. - .. code-block:: python from google.cloud import datastore_v1 @@ -924,7 +924,6 @@ def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. - .. code-block:: python from google.cloud import datastore_v1 @@ -1027,7 +1026,6 @@ def reserve_ids( r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. - .. code-block:: python from google.cloud import datastore_v1 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index b50c0fca7104..0bf916c8ab0c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -83,6 +83,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -130,8 +131,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -145,8 +145,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -180,8 +179,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -265,5 +263,9 @@ def reserve_ids( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DatastoreTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index d8a2f0018491..16938b68a459 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -419,5 +419,9 @@ def reserve_ids( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DatastoreGrpcTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index d5d974c2eef9..f4907298ca5e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -17,6 +17,7 @@ from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -92,6 +93,9 @@ class LookupResponse(proto.Message): resource constraints. The order of results in this field is undefined and has no relation to the order of the keys in the input. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which these entities were read or + found missing. """ found = proto.RepeatedField( @@ -109,6 +113,11 @@ class LookupResponse(proto.Message): number=3, message=entity.Key, ) + read_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) class RunQueryRequest(proto.Message): @@ -341,6 +350,9 @@ class CommitResponse(proto.Message): index_updates (int): The number of index entries updated during the commit, or zero if none were updated. + commit_time (google.protobuf.timestamp_pb2.Timestamp): + The transaction commit timestamp. Not set for + non-transactional commits. """ mutation_results = proto.RepeatedField( @@ -352,6 +364,11 @@ class CommitResponse(proto.Message): proto.INT32, number=4, ) + commit_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) class AllocateIdsRequest(proto.Message): @@ -477,6 +494,13 @@ class Mutation(proto.Message): current version on the server, the mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the entity that this + mutation is being applied to. If this does not + match the current update time on the server, the + mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. """ @@ -509,6 +533,12 @@ class Mutation(proto.Message): number=8, oneof="conflict_detection_strategy", ) + update_time = proto.Field( + proto.MESSAGE, + number=11, + oneof="conflict_detection_strategy", + message=timestamp_pb2.Timestamp, + ) class MutationResult(proto.Message): @@ -527,6 +557,13 @@ class MutationResult(proto.Message): greater than the version of any previous entity and less than the version of any possible future entity. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the entity on the server + after processing the mutation. If the mutation + doesn't change anything on the server, then the + timestamp will be the update timestamp of the + current entity. This field will not be set after + a 'delete'. conflict_detected (bool): Whether a conflict was detected for this mutation. Always false when a conflict detection @@ -542,6 +579,11 @@ class MutationResult(proto.Message): proto.INT64, number=4, ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) conflict_detected = proto.Field( proto.BOOL, number=5, @@ -569,6 +611,13 @@ class ReadOptions(proto.Message): transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads entities as they were at the given + time. This may not be older than 270 seconds. + This value is only supported for Cloud Firestore + in Datastore mode. + This field is a member of `oneof`_ ``consistency_type``. """ @@ -589,6 +638,12 @@ class ReadConsistency(proto.Enum): number=2, oneof="consistency_type", ) + read_time = proto.Field( + proto.MESSAGE, + number=4, + oneof="consistency_type", + message=timestamp_pb2.Timestamp, + ) class TransactionOptions(proto.Message): @@ -634,7 +689,19 @@ class ReadWrite(proto.Message): ) class ReadOnly(proto.Message): - r"""Options specific to read-only transactions.""" + r"""Options specific to read-only transactions. + + Attributes: + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads entities at the given time. + This may not be older than 60 seconds. + """ + + read_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) read_write = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index a9371a9872b8..e949a56a95b0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -346,7 +346,7 @@ class Entity(proto.Message): example, an entity in ``Value.entity_value`` may have no key). An entity's kind is its key path's last element's kind, or null if it has no key. - properties (Sequence[google.cloud.datastore_v1.types.Entity.PropertiesEntry]): + properties (Mapping[str, google.cloud.datastore_v1.types.Value]): The entity's properties. The map's keys are property names. A property name matching regex ``__.*__`` is reserved. 
A reserved property name is forbidden in certain documented diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 7c368c57a784..1179efcef561 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -16,6 +16,7 @@ import proto # type: ignore from google.cloud.datastore_v1.types import entity as gd_entity +from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore @@ -56,6 +57,12 @@ class EntityResult(proto.Message): entities in ``LookupResponse``, this is the version of the snapshot that was used to look up the entity, and it is always set except for eventually consistent reads. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the entity was last changed. This field is + set for + [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] + entity results. If this entity is missing, this field will + not be set. cursor (bytes): A cursor that points to the position after the result entity. Set only when the ``EntityResult`` is part of a @@ -84,6 +91,11 @@ class ResultType(proto.Enum): proto.INT64, number=4, ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) cursor = proto.Field( proto.BYTES, number=3, @@ -368,7 +380,7 @@ class GqlQuery(proto.Message): and instead must bind all values. For example, ``SELECT * FROM Kind WHERE a = 'string literal'`` is not allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. - named_bindings (Sequence[google.cloud.datastore_v1.types.GqlQuery.NamedBindingsEntry]): + named_bindings (Mapping[str, google.cloud.datastore_v1.types.GqlQueryParameter]): For each non-reserved named binding site in the query string, there must be a named parameter with that name, but not necessarily the inverse. @@ -473,6 +485,17 @@ class QueryResultBatch(proto.Message): Each batch's snapshot version is valid for all preceding batches. The value will be zero for eventually consistent queries. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Read timestamp this batch was returned from. This applies to + the range of results from the query's ``start_cursor`` (or + the beginning of the query if no cursor was given) to this + batch's ``end_cursor`` (not the query's ``end_cursor``). + + In a single transaction, subsequent query result batches for + the same query can have a greater timestamp. Each batch's + read timestamp is valid for all preceding batches. This + value will not be set for eventually consistent queries in + Cloud Datastore. 
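
A minimal sketch of how the new point-in-time read fields surface through the generated types; per the commit message the server-side behavior is private-preview only, and the project ID and key below are placeholders:

    import datetime

    from google.cloud import datastore_v1
    from google.protobuf import timestamp_pb2

    client = datastore_v1.DatastoreClient()

    # Pin the read to ~2 minutes ago; ReadOptions.read_time may not be
    # older than 270 seconds (per the docstring above).
    read_time = timestamp_pb2.Timestamp()
    read_time.FromDatetime(
        datetime.datetime.utcnow() - datetime.timedelta(seconds=120)
    )

    key = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task", id=1234)],
    )

    response = client.lookup(
        request=datastore_v1.LookupRequest(
            project_id="my-project",
            read_options=datastore_v1.ReadOptions(read_time=read_time),
            keys=[key],
        )
    )
    # The new LookupResponse.read_time field reports when the entities
    # were actually read (or found missing).
    print(response.read_time)
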
""" class MoreResultsType(proto.Enum): @@ -514,6 +537,11 @@ class MoreResultsType(proto.Enum): proto.INT64, number=7, ) + read_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 7e678103e2db..fd1fc14c4834 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -95,24 +95,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreAdminClient, - DatastoreAdminAsyncClient, + (DatastoreAdminClient, "grpc"), + (DatastoreAdminAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_admin_client_from_service_account_info(client_class): +def test_datastore_admin_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( @@ -141,27 +141,31 @@ def test_datastore_admin_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreAdminClient, - DatastoreAdminAsyncClient, + (DatastoreAdminClient, "grpc"), + (DatastoreAdminAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_admin_client_from_service_account_file(client_class): +def test_datastore_admin_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") def test_datastore_admin_client_get_transport_class(): @@ -1535,7 +1539,7 @@ async def test_list_indexes_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1581,7 +1585,9 @@ async def test_list_indexes_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_indexes(request={})).pages: + async for page_ in ( + await 
client.list_indexes(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1678,6 +1684,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DatastoreAdminClient( @@ -1730,6 +1749,14 @@ def test_datastore_admin_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_datastore_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -1887,24 +1914,40 @@ def test_datastore_admin_grpc_transport_client_cert_source_for_mtls(transport_cl ) -def test_datastore_admin_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_admin_host_no_port(transport_name): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") -def test_datastore_admin_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_admin_host_with_port(transport_name): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == ("datastore.googleapis.com:8000") def test_datastore_admin_grpc_transport_channel(): diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 445f96fadbb1..4106b21769cd 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -85,24 +85,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreClient, - DatastoreAsyncClient, + (DatastoreClient, "grpc"), + (DatastoreAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_client_from_service_account_info(client_class): +def test_datastore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert 
client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( @@ -131,27 +131,31 @@ def test_datastore_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - DatastoreClient, - DatastoreAsyncClient, + (DatastoreClient, "grpc"), + (DatastoreAsyncClient, "grpc_asyncio"), ], ) -def test_datastore_client_from_service_account_file(client_class): +def test_datastore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") def test_datastore_client_get_transport_class(): @@ -2012,6 +2016,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = DatastoreClient( @@ -2060,6 +2077,14 @@ def test_datastore_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_datastore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2214,24 +2239,40 @@ def test_datastore_grpc_transport_client_cert_source_for_mtls(transport_class): ) -def test_datastore_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_host_no_port(transport_name): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") -def test_datastore_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_host_with_port(transport_name): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == ("datastore.googleapis.com:8000") def test_datastore_grpc_transport_channel(): From 13a8f88339450b2c2d37e5f86235d45277155792 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Apr 2022 20:49:39 -0400 Subject: [PATCH 413/611] chore: use gapic-generator-python 0.65.1 (#302) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 6 ++++-- .../services/datastore_admin/transports/base.py | 6 ++++-- .../datastore_v1/services/datastore/async_client.py | 9 ++++++--- .../datastore_v1/services/datastore/transports/base.py | 9 ++++++--- 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 122d1fe56b13..0f6be699659e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -838,7 +838,8 @@ def sample_get_index(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -920,7 +921,8 
@@ def sample_list_indexes(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 0cf9ac647439..618a990c4312 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -154,7 +154,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -168,7 +169,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index a4c415435de4..ab4d60cc5ef6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -300,7 +300,8 @@ def sample_lookup(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -376,7 +377,8 @@ def sample_run_query(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -913,7 +915,8 @@ def sample_reserve_ids(): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 0bf916c8ab0c..22a4c167dd2e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -131,7 +131,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -145,7 +146,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -179,7 +181,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), From 4d408d0d456c13b6fd0b65cb7f6285bb0104835d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:49:57 -0400 Subject: [PATCH 414/611] chore(python): add nox session to sort python imports (#303) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- packages/google-cloud-datastore/noxfile.py | 27 ++++++++++++++++--- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index bc893c979e20..7c454abf76f3 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 975a93e36baa..27e2a51e5db6 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -25,7 +25,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -85,7 +86,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -96,7 +97,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
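To make the intent of the retry changes in PATCH 413 concrete: the generated clients stop retrying on the broad GoogleAPICallError base class and instead retry only the transient DeadlineExceeded and ServiceUnavailable errors, so permanent failures (bad requests, permission errors) surface immediately. A minimal standalone sketch of an equivalent retry policy, assuming google-api-core is installed; the numeric values mirror the ones visible in the diff, and the wrapped callable is hypothetical:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Retry only transient errors, with exponential backoff capped at 60s.
    transient_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )

    # Hypothetical usage: wrap any callable that issues the RPC.
    # response = transient_retry(do_lookup_rpc)(request)
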
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) From 84d1c8e256e248c64df6db7170ec5972584b6126 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:08:20 +0000 Subject: [PATCH 415/611] chore(python): use ubuntu 22.04 in docs image (#305) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.kokoro/docker/docs/Dockerfile | 20 +++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 7c454abf76f3..64f82d6bf4bc 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index 4e1b1fb8b5a5..238b87b9d1c9 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
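The new nox format session in PATCH 414 chains isort and black over the same LINT_PATHS; isort's --fss (force-sort-within-sections) flag keeps plain "import x" and "from x import y" lines in a single strictly alphabetical block per section, and black then normalizes the rest. A minimal standalone sketch of the same pattern, assuming nox, black, and isort are available (the pinned versions are the ones from the diff):

    import nox

    BLACK_VERSION = "black==22.3.0"
    ISORT_VERSION = "isort==5.10.1"
    LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]


    @nox.session(python="3.8")
    def format(session):
        """Sort imports with isort, then format code with black."""
        session.install(BLACK_VERSION, ISORT_VERSION)
        # --fss sorts straight imports and from-imports together.
        session.run("isort", "--fss", *LINT_PATHS)
        session.run("black", *LINT_PATHS)
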
-from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From b0c6142a76a6bf119b1f08f1fe2b935065d034c3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:12:55 -0400 Subject: [PATCH 416/611] chore: [autoapprove] update readme_gen.py to include autoescape True (#307) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-datastore/scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 64f82d6bf4bc..b631901e99f4 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py index d309d6e97518..91b59676bfc7 100644 --- a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From 66bb6f7b11c95d65f6a24288db17e3504105e255 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 23:02:12 +0000 Subject: [PATCH 417/611] chore(python): auto approve template changes (#309) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-datastore/.github/auto-approve.yml diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index b631901e99f4..757c9dca75ad 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
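The readme_gen.py change in PATCH 416 turns on Jinja2's autoescape so that template rendering escapes interpolated values by default; since the templates here produce RST rather than HTML, the practical effect is mainly to satisfy static analyzers such as Bandit (check B701), which flag environments created without autoescaping. A minimal sketch of the resulting environment setup, assuming jinja2 is installed and a sibling "templates" directory as in the script:

    import os

    import jinja2

    jinja_env = jinja2.Environment(
        trim_blocks=True,
        loader=jinja2.FileSystemLoader(
            os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
        ),
        autoescape=True,  # escape rendered variables by default
    )
    template = jinja_env.get_template("README.tmpl.rst")

For templates that genuinely mix output types, jinja2.select_autoescape() can scope escaping by file extension instead of enabling it globally.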
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/packages/google-cloud-datastore/.github/auto-approve.yml b/packages/google-cloud-datastore/.github/auto-approve.yml new file mode 100644 index 000000000000..311ebbb853a9 --- /dev/null +++ b/packages/google-cloud-datastore/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From 7d961b7d0f18faa47e5ece5bcd7c702f05ad9a55 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 May 2022 08:51:28 -0400 Subject: [PATCH 418/611] chore(main): release 2.6.0 (#301) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 8064ab655c1a..5c7d5bb1e4c0 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.6.0](https://github.com/googleapis/python-datastore/compare/v2.5.1...v2.6.0) (2022-05-05) + + +### Features + +* expose new read_time API fields, currently only available in private preview ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece)) + + +### Documentation + +* fix type in docstring for map fields ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece)) + ### [2.5.1](https://github.com/googleapis/python-datastore/compare/v2.5.0...v2.5.1) (2022-03-05) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index aa1cc6e9e184..ae34a9fbeb85 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.5.1" +__version__ = "2.6.0" From ae27b09f98e557a7b68d40cf7a47825cb6386dc9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 26 May 2022 21:37:45 -0400 Subject: [PATCH 419/611] fix: regenerate pb2 file with grpcio-tools (#314) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: regenerate pb2 file with grpcio-tools * set coverage level to 99% * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../.github/workflows/unittest.yml | 2 +- .../cloud/datastore/_app_engine_key_pb2.py | 275 +++--------------- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/owlbot.py | 1 + 4 files changed, 48 insertions(+), 232 deletions(-) diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index e5be6edbd54d..ae99497c80d6 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -54,4 +54,4 @@ jobs: - name: Report coverage results run: | coverage combine .coverage-results/.coverage* - coverage report --show-missing --fail-under=100 + coverage report --show-missing --fail-under=99 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py index 7fcd8fb181a4..16e24aca7520 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py @@ -1,265 +1,80 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: _app_engine_key.proto - -import sys +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/datastore/_app_engine_key.proto +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name="_app_engine_key.proto", - package="", - syntax="proto2", - serialized_pb=_b( - '\n\x15_app_engine_key.proto"V\n\tReference\x12\x0b\n\x03\x61pp\x18\r \x02(\t\x12\x12\n\nname_space\x18\x14 \x01(\t\x12\x13\n\x04path\x18\x0e \x02(\x0b\x32\x05.Path\x12\x13\n\x0b\x64\x61tabase_id\x18\x17 \x01(\t"Y\n\x04Path\x12\x1e\n\x07\x65lement\x18\x01 \x03(\n2\r.Path.Element\x1a\x31\n\x07\x45lement\x12\x0c\n\x04type\x18\x02 \x02(\t\x12\n\n\x02id\x18\x03 \x01(\x03\x12\x0c\n\x04name\x18\x04 \x01(\t' - ), -) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -_REFERENCE = _descriptor.Descriptor( - name="Reference", - full_name="Reference", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="app", - full_name="Reference.app", - index=0, - number=13, - type=9, - cpp_type=9, - label=2, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ), - _descriptor.FieldDescriptor( - name="name_space", - full_name="Reference.name_space", - index=1, - number=20, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ), - _descriptor.FieldDescriptor( - name="path", - full_name="Reference.path", - index=2, - number=14, - type=11, - cpp_type=10, - label=2, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ), - _descriptor.FieldDescriptor( - name="database_id", - full_name="Reference.database_id", - index=3, - number=23, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=25, - serialized_end=111, -) - - -_PATH_ELEMENT = _descriptor.Descriptor( - name="Element", - full_name="Path.Element", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="Path.Element.type", - index=0, - number=2, - type=9, - cpp_type=9, - label=2, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ), - _descriptor.FieldDescriptor( - name="id", - full_name="Path.Element.id", - index=1, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - options=None, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="Path.Element.name", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=153, - serialized_end=202, -) - -_PATH = _descriptor.Descriptor( - name="Path", - full_name="Path", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="element", - full_name="Path.element", - index=0, - number=1, - type=10, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - options=None, - ) - ], - extensions=[], - nested_types=[_PATH_ELEMENT], - enum_types=[], - options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=113, - serialized_end=202, +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n,google/cloud/datastore/_app_engine_key.proto"V\n\tReference\x12\x0b\n\x03\x61pp\x18\r \x02(\t\x12\x12\n\nname_space\x18\x14 \x01(\t\x12\x13\n\x04path\x18\x0e \x02(\x0b\x32\x05.Path\x12\x13\n\x0b\x64\x61tabase_id\x18\x17 \x01(\t"Y\n\x04Path\x12\x1e\n\x07\x65lement\x18\x01 \x03(\n2\r.Path.Element\x1a\x31\n\x07\x45lement\x12\x0c\n\x04type\x18\x02 \x02(\t\x12\n\n\x02id\x18\x03 \x01(\x03\x12\x0c\n\x04name\x18\x04 \x01(\t' ) -_REFERENCE.fields_by_name["path"].message_type = _PATH -_PATH_ELEMENT.containing_type = _PATH -_PATH.fields_by_name["element"].message_type = _PATH_ELEMENT -DESCRIPTOR.message_types_by_name["Reference"] = _REFERENCE -DESCRIPTOR.message_types_by_name["Path"] = _PATH +_REFERENCE = DESCRIPTOR.message_types_by_name["Reference"] +_PATH = DESCRIPTOR.message_types_by_name["Path"] +_PATH_ELEMENT = _PATH.nested_types_by_name["Element"] Reference = _reflection.GeneratedProtocolMessageType( "Reference", (_message.Message,), - dict( - DESCRIPTOR=_REFERENCE, - __module__="_app_engine_key_pb2" + { + "DESCRIPTOR": _REFERENCE, + "__module__": "google.cloud.datastore._app_engine_key_pb2" # @@protoc_insertion_point(class_scope:Reference) - ), + }, ) _sym_db.RegisterMessage(Reference) Path = _reflection.GeneratedProtocolMessageType( "Path", (_message.Message,), - dict( - Element=_reflection.GeneratedProtocolMessageType( + { + "Element": _reflection.GeneratedProtocolMessageType( "Element", (_message.Message,), - dict( - DESCRIPTOR=_PATH_ELEMENT, - __module__="_app_engine_key_pb2" + { + "DESCRIPTOR": _PATH_ELEMENT, + "__module__": "google.cloud.datastore._app_engine_key_pb2" # @@protoc_insertion_point(class_scope:Path.Element) - ), + }, ), - DESCRIPTOR=_PATH, - __module__="_app_engine_key_pb2" + "DESCRIPTOR": _PATH, + "__module__": "google.cloud.datastore._app_engine_key_pb2" # @@protoc_insertion_point(class_scope:Path) - ), + }, ) _sym_db.RegisterMessage(Path) _sym_db.RegisterMessage(Path.Element) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _REFERENCE._serialized_start = 48 + _REFERENCE._serialized_end = 134 + _PATH._serialized_start = 136 + _PATH._serialized_end = 225 + _PATH_ELEMENT._serialized_start = 176 + _PATH_ELEMENT._serialized_end = 225 # 
@@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 27e2a51e5db6..2279ec7eb58d 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -283,7 +283,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 63214a43745e..ca8809e4ff73 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -99,6 +99,7 @@ def get_staging_dirs( microgenerator=True, split_system_tests=True, unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], + cov_level=99, ) s.move( templated_files, From 6c5db6e2452a82192093b3991a44adb5ab3093a3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 27 May 2022 12:26:46 -0400 Subject: [PATCH 420/611] chore(main): release 2.6.1 (#316) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 5c7d5bb1e4c0..8f40015fbd30 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +### [2.6.1](https://github.com/googleapis/python-datastore/compare/v2.6.0...v2.6.1) (2022-05-27) + + +### Bug Fixes + +* regenerate pb2 file with grpcio-tools ([#314](https://github.com/googleapis/python-datastore/issues/314)) ([0412cd5](https://github.com/googleapis/python-datastore/commit/0412cd5dbcb8e4042b2ad300e35dd6797710072a)) + ## [2.6.0](https://github.com/googleapis/python-datastore/compare/v2.5.1...v2.6.0) (2022-05-05) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index ae34a9fbeb85..410cd066ed8d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.6.0" +__version__ = "2.6.1" From 3b04287cfe866ee0fb5dc54b6480bfb43ef82a08 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Fri, 27 May 2022 11:20:45 -0700 Subject: [PATCH 421/611] chore: Increase coverage to 100% (#317) Added `pragma: NO COVER` to the generated code. 
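The regeneration in PATCH 419 replaces the hand-rolled, Python-2-era descriptor construction with the modern protobuf pattern: a single AddSerializedFile() call on the default descriptor pool, message classes built via GeneratedProtocolMessageType, and serialized offsets applied only when the pure-Python descriptor path is in use. The resulting messages behave the same as before. A minimal usage sketch, assuming the regenerated module imports cleanly (field values are illustrative; app, path, and Element.type are required proto2 fields):

    from google.cloud.datastore import _app_engine_key_pb2

    # Build a legacy App Engine Reference key: an app id plus a path
    # of (kind, id-or-name) Elements.
    ref = _app_engine_key_pb2.Reference()
    ref.app = "s~example-app"
    elem = ref.path.element.add()
    elem.type = "Parent"
    elem.id = 59

    # proto2 round-trip: serialize and parse back.
    data = ref.SerializeToString()
    parsed = _app_engine_key_pb2.Reference.FromString(data)
    assert parsed.path.element[0].type == "Parent"
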
Closes #315 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/workflows/unittest.yml | 2 +- .../google/cloud/datastore/_app_engine_key_pb2.py | 2 +- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/owlbot.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index ae99497c80d6..e5be6edbd54d 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -54,4 +54,4 @@ jobs: - name: Report coverage results run: | coverage combine .coverage-results/.coverage* - coverage report --show-missing --fail-under=99 + coverage report --show-missing --fail-under=100 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py index 16e24aca7520..11a1df51cf2b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py @@ -68,7 +68,7 @@ _sym_db.RegisterMessage(Path) _sym_db.RegisterMessage(Path.Element) -if _descriptor._USE_C_DESCRIPTORS == False: +if _descriptor._USE_C_DESCRIPTORS == False: # pragma: NO COVER DESCRIPTOR._options = None _REFERENCE._serialized_start = 48 diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 2279ec7eb58d..27e2a51e5db6 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -283,7 +283,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index ca8809e4ff73..79c30cac3d1d 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -99,7 +99,7 @@ def get_staging_dirs( microgenerator=True, split_system_tests=True, unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], - cov_level=99, + cov_level=100, ) s.move( templated_files, From f41882c8f68d8dfabf3fba972db45ff8bf447870 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 31 May 2022 19:58:51 -0400 Subject: [PATCH 422/611] docs: fix changelog header to consistent size (#319) --- packages/google-cloud-datastore/CHANGELOG.md | 30 ++++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 8f40015fbd30..d85a6386e739 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,7 +4,7 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history -### [2.6.1](https://github.com/googleapis/python-datastore/compare/v2.6.0...v2.6.1) (2022-05-27) +## [2.6.1](https://github.com/googleapis/python-datastore/compare/v2.6.0...v2.6.1) (2022-05-27) ### Bug Fixes @@ -23,7 +23,7 @@ * fix type in docstring for map fields ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece)) -### [2.5.1](https://github.com/googleapis/python-datastore/compare/v2.5.0...v2.5.1) (2022-03-05) +## [2.5.1](https://github.com/googleapis/python-datastore/compare/v2.5.0...v2.5.1) (2022-03-05) ### Bug Fixes @@ -90,7 +90,7 @@ * add support for Python 3.10 ([#233](https://www.github.com/googleapis/python-datastore/issues/233)) ([f524c40](https://www.github.com/googleapis/python-datastore/commit/f524c40e8251c2b716ea87cd512404f0d6f1b019)) -### [2.1.6](https://www.github.com/googleapis/python-datastore/compare/v2.1.5...v2.1.6) (2021-07-26) +## [2.1.6](https://www.github.com/googleapis/python-datastore/compare/v2.1.5...v2.1.6) (2021-07-26) ### Documentation @@ -98,14 +98,14 @@ * add Samples section to CONTRIBUTING.rst ([#195](https://www.github.com/googleapis/python-datastore/issues/195)) ([f607fb5](https://www.github.com/googleapis/python-datastore/commit/f607fb544a2f7279267e5a5a534fc31e573b6b74)) -### [2.1.5](https://www.github.com/googleapis/python-datastore/compare/v2.1.4...v2.1.5) (2021-07-20) +## [2.1.5](https://www.github.com/googleapis/python-datastore/compare/v2.1.4...v2.1.5) (2021-07-20) ### Bug Fixes * **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#194](https://www.github.com/googleapis/python-datastore/issues/194)) ([e94f97c](https://www.github.com/googleapis/python-datastore/commit/e94f97ce42b04ba76766737eb69cdaf92bc2ac05)) -### [2.1.4](https://www.github.com/googleapis/python-datastore/compare/v2.1.3...v2.1.4) (2021-07-09) +## [2.1.4](https://www.github.com/googleapis/python-datastore/compare/v2.1.3...v2.1.4) (2021-07-09) ### Performance Improvements @@ -117,14 +117,14 @@ * omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-datastore/issues/1127)) 
([#181](https://www.github.com/googleapis/python-datastore/issues/181)) ([6efde70](https://www.github.com/googleapis/python-datastore/commit/6efde70db751bf708091b24a932ab8571bd981a6)) -### [2.1.3](https://www.github.com/googleapis/python-datastore/compare/v2.1.2...v2.1.3) (2021-05-25) +## [2.1.3](https://www.github.com/googleapis/python-datastore/compare/v2.1.2...v2.1.3) (2021-05-25) ### Bug Fixes * **perf:** improve performance unmarshalling entities from protobuf2 ([#175](https://www.github.com/googleapis/python-datastore/issues/175)) ([0e5b718](https://www.github.com/googleapis/python-datastore/commit/0e5b718a70368f656ede3a27174ef74ca324ab65)) -### [2.1.2](https://www.github.com/googleapis/python-datastore/compare/v2.1.1...v2.1.2) (2021-05-03) +## [2.1.2](https://www.github.com/googleapis/python-datastore/compare/v2.1.1...v2.1.2) (2021-05-03) ### Bug Fixes @@ -136,7 +136,7 @@ * update intersphinx URLs for grpc and auth ([#93](https://www.github.com/googleapis/python-datastore/issues/93)) ([4f90d04](https://www.github.com/googleapis/python-datastore/commit/4f90d04c81aacdbaf83f5a9dc996898fa9c7ba26)) -### [2.1.1](https://www.github.com/googleapis/python-datastore/compare/v2.1.0...v2.1.1) (2021-04-20) +## [2.1.1](https://www.github.com/googleapis/python-datastore/compare/v2.1.0...v2.1.1) (2021-04-20) ### Bug Fixes @@ -155,7 +155,7 @@ * remove six dependency ([#120](https://www.github.com/googleapis/python-datastore/issues/120)) ([b1715e5](https://www.github.com/googleapis/python-datastore/commit/b1715e500f870fd5292bb84232b0039c2ac6be85)) -### [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.0...v2.0.1) (2020-11-13) +## [2.0.1](https://www.github.com/googleapis/python-datastore/compare/v2.0.0...v2.0.1) (2020-11-13) ### Bug Fixes @@ -191,28 +191,28 @@ * Leverage new generator, proto-plus, for google-cloud-datastore ([#104](https://www.github.com/googleapis/python-datastore/issues/104)) ([1723a26](https://www.github.com/googleapis/python-datastore/commit/1723a268a6f647d1c798deb076c038f7af9b16c9)) -### [1.15.3](https://www.github.com/googleapis/python-datastore/compare/v1.15.2...v1.15.3) (2020-10-06) +## [1.15.3](https://www.github.com/googleapis/python-datastore/compare/v1.15.2...v1.15.3) (2020-10-06) ### Bug Fixes * use full path and os.path to version.py in setup.py ([#97](https://www.github.com/googleapis/python-datastore/issues/97)) ([0f5506f](https://www.github.com/googleapis/python-datastore/commit/0f5506fe8bcb899e64cc7c1cf881edc3d3aaead8)) -### [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) +## [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) ### Bug Fixes * use version.py instead of pkg_resources.get_distribution ([#94](https://www.github.com/googleapis/python-datastore/issues/94)) ([ea77534](https://www.github.com/googleapis/python-datastore/commit/ea77534bc973e22894357a81420dd17ed8db0027)) -### [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) +## [1.15.2](https://www.github.com/googleapis/python-datastore/compare/v1.15.1...v1.15.2) (2020-10-06) ### Bug Fixes * use version.py instead of pkg_resources.get_distribution ([#94](https://www.github.com/googleapis/python-datastore/issues/94)) ([ea77534](https://www.github.com/googleapis/python-datastore/commit/ea77534bc973e22894357a81420dd17ed8db0027)) -### [1.15.1](https://www.github.com/googleapis/python-datastore/compare/v1.15.0...v1.15.1) (2020-09-23) +## 
[1.15.1](https://www.github.com/googleapis/python-datastore/compare/v1.15.0...v1.15.1) (2020-09-23) ### Bug Fixes @@ -255,14 +255,14 @@ * correct semantics of 'complete_key' arg to 'Client.reserve_ids' ([#36](https://www.github.com/googleapis/python-datastore/issues/36)) ([50ed945](https://www.github.com/googleapis/python-datastore/commit/50ed94503da244434df0be58098a0ccf2da54b16)) * update docs build (via synth) ([#58](https://www.github.com/googleapis/python-datastore/issues/58)) ([5bdacd4](https://www.github.com/googleapis/python-datastore/commit/5bdacd4785f3d433e6e7302fc6839a3c5a3314b4)), closes [#700](https://www.github.com/googleapis/python-datastore/issues/700) -### [1.13.2](https://www.github.com/googleapis/python-datastore/compare/v1.13.1...v1.13.2) (2020-07-17) +## [1.13.2](https://www.github.com/googleapis/python-datastore/compare/v1.13.1...v1.13.2) (2020-07-17) ### Bug Fixes * modify admin pkg name in gapic ([#47](https://www.github.com/googleapis/python-datastore/issues/47)) ([5b5011d](https://www.github.com/googleapis/python-datastore/commit/5b5011daf74133ecdd579bf19bbcf356e6f40dad)) -### [1.13.1](https://www.github.com/googleapis/python-datastore/compare/v1.13.0...v1.13.1) (2020-07-13) +## [1.13.1](https://www.github.com/googleapis/python-datastore/compare/v1.13.0...v1.13.1) (2020-07-13) ### Bug Fixes From a4618390e234a280b48cdcb6a587f81750c3fadb Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 2 Jun 2022 13:16:12 -0400 Subject: [PATCH 423/611] fix(deps): require protobuf <4.0.0dev (#318) --- packages/google-cloud-datastore/setup.py | 3 ++- packages/google-cloud-datastore/testing/constraints-3.6.txt | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 2bd2f0687aa5..cd203ebe6fb1 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -37,7 +37,8 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.0, <3.0.0dev", - "proto-plus >= 1.15.0", + "proto-plus >= 1.15.0, <2.0.0dev", + "protobuf >= 3.19.0, <4.0.0dev", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/packages/google-cloud-datastore/testing/constraints-3.6.txt b/packages/google-cloud-datastore/testing/constraints-3.6.txt index 64daec82612f..b6789fb79133 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.6.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.6.txt @@ -9,3 +9,4 @@ google-api-core==1.31.5 google-cloud-core==1.4.0 proto-plus==1.15.0 libcst==0.2.5 +protobuf==3.19.0 From 68ccc59fa0e5a93ec56544073fabb4d7527aa6ed Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 7 Jun 2022 06:52:43 -0400 Subject: [PATCH 424/611] chore: test minimum dependencies in python 3.7 (#322) --- .../testing/constraints-3.7.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index e69de29bb2d1..b6789fb79133 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -0,0 +1,12 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.31.5 +google-cloud-core==1.4.0 +proto-plus==1.15.0 +libcst==0.2.5 +protobuf==3.19.0 From c280d1b81aeb3d88950b10115c853a1f3ef88779 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Jun 2022 07:15:20 -0400 Subject: [PATCH 425/611] chore(main): release 2.6.2 (#320) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index d85a6386e739..47aa7df39cb7 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.6.2](https://github.com/googleapis/python-datastore/compare/v2.6.1...v2.6.2) (2022-06-07) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#318](https://github.com/googleapis/python-datastore/issues/318)) ([1dccd37](https://github.com/googleapis/python-datastore/commit/1dccd377fd86613b330df11477135b56e19d2226)) + + +### Documentation + +* fix changelog header to consistent size ([#319](https://github.com/googleapis/python-datastore/issues/319)) ([d3e9304](https://github.com/googleapis/python-datastore/commit/d3e93044c4520e5ebb25737cdd356d9d8e57fe6e)) + ## [2.6.1](https://github.com/googleapis/python-datastore/compare/v2.6.0...v2.6.1) (2022-05-27) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 410cd066ed8d..9aaeb8bc4c7d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.6.1" +__version__ = "2.6.2" From ecc61aed79c4f09560ab28e1a45d947d2f52514b Mon Sep 17 00:00:00 2001 From: Han Date: Thu, 9 Jun 2022 17:57:50 +0000 Subject: [PATCH 426/611] feat: support IN/NOT_IN/NOT_EQUAL operators (#287) --- .../google/cloud/datastore/query.py | 11 +++++++---- .../google-cloud-datastore/tests/unit/test_query.py | 8 +++++++- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5e4f49376521..57c19205f7af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -86,6 +86,9 @@ class Query(object): "<": query_pb2.PropertyFilter.Operator.LESS_THAN, ">": query_pb2.PropertyFilter.Operator.GREATER_THAN, "=": query_pb2.PropertyFilter.Operator.EQUAL, + "!=": query_pb2.PropertyFilter.Operator.NOT_EQUAL, + "IN": query_pb2.PropertyFilter.Operator.IN, + "NOT_IN": query_pb2.PropertyFilter.Operator.NOT_IN, } """Mapping of operator strings and their protobuf equivalents.""" @@ -215,7 +218,7 @@ def add_filter(self, property_name, operator, value): where property is a property stored on the entity in the datastore and operator is one of ``OPERATORS`` - (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``): + (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``): .. testsetup:: query-filter @@ -235,7 +238,7 @@ def add_filter(self, property_name, operator, value): :param property_name: A property name. :type operator: str - :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``. + :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``. :type value: :class:`int`, :class:`str`, :class:`bool`, :class:`float`, :class:`NoneType`, @@ -252,7 +255,7 @@ def add_filter(self, property_name, operator, value): """ if self.OPERATORS.get(operator) is None: error_message = 'Invalid expression: "%s"' % (operator,) - choices_message = "Please use one of: =, <, <=, >, >=." + choices_message = "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." raise ValueError(error_message, choices_message) if property_name == "__key__" and not isinstance(value, Key): @@ -293,7 +296,7 @@ def key_filter(self, key, operator="="): :param key: The key to filter on. :type operator: str - :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``. + :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``. Defaults to ``=``. 
""" self.add_filter("__key__", operator, key) diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 3cbd95b84155..1f250f4636bb 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -175,12 +175,18 @@ def test_query_add_filter_w_all_operators(): query.add_filter("lt_prop", "<", "val3") query.add_filter("gt_prop", ">", "val4") query.add_filter("eq_prop", "=", "val5") - assert len(query.filters) == 5 + query.add_filter("in_prop", "IN", ["val6"]) + query.add_filter("neq_prop", "!=", "val9") + query.add_filter("not_in_prop", "NOT_IN", ["val13"]) + assert len(query.filters) == 8 assert query.filters[0] == ("leq_prop", "<=", "val1") assert query.filters[1] == ("geq_prop", ">=", "val2") assert query.filters[2] == ("lt_prop", "<", "val3") assert query.filters[3] == ("gt_prop", ">", "val4") assert query.filters[4] == ("eq_prop", "=", "val5") + assert query.filters[5] == ("in_prop", "IN", ["val6"]) + assert query.filters[6] == ("neq_prop", "!=", "val9") + assert query.filters[7] == ("not_in_prop", "NOT_IN", ["val13"]) def test_query_add_filter_w_known_operator_and_entity(): From fb0038640da794be8fc31474e4bd32ab77f2682d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Jun 2022 12:05:05 -0600 Subject: [PATCH 427/611] chore(main): release 2.7.0 (#326) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 47aa7df39cb7..a7b39f290ca6 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.7.0](https://github.com/googleapis/python-datastore/compare/v2.6.2...v2.7.0) (2022-06-09) + + +### Features + +* support IN/NOT_IN/NOT_EQUAL operators ([#287](https://github.com/googleapis/python-datastore/issues/287)) ([465bd87](https://github.com/googleapis/python-datastore/commit/465bd87c5463b4203b3e087090033a814c4128be)) + ## [2.6.2](https://github.com/googleapis/python-datastore/compare/v2.6.1...v2.6.2) (2022-06-07) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 9aaeb8bc4c7d..d962613e0244 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.6.2" +__version__ = "2.7.0" From 71784d32135c7d6fe4e6d2d6740aee11e1df38b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 12 Jun 2022 10:46:03 -0400 Subject: [PATCH 428/611] chore: add prerelease nox session (#327) Source-Link: https://github.com/googleapis/synthtool/commit/050953d60f71b4ed4be563e032f03c192c50332f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/continuous/prerelease-deps.cfg | 7 ++ .../.kokoro/presubmit/prerelease-deps.cfg | 7 ++ packages/google-cloud-datastore/noxfile.py | 64 +++++++++++++++++++ 4 files changed, 80 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 757c9dca75ad..2185b591844c 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 +# created: 2022-06-12T13:11:45.905884945Z diff --git a/packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg b/packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 27e2a51e5db6..8de098db151e 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -359,3 +359,67 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + prerel_deps = [ + "protobuf", + "googleapis-common-protos", + "google-auth", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + # dependencies of google-auth + "cryptography", + "pyasn1", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = ["requests"] + session.install(*other_deps) + + session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Don't overwrite prerelease packages. + deps = [dep for dep in deps if dep not in prerel_deps] + # We use --no-deps to ensure that pre-release versions aren't overwritten + # by the version ranges in setup.py. + session.install(*deps) + session.install("--no-deps", "-e", ".[all]") + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + session.run("py.test", "tests/system") + session.run("py.test", "samples/snippets") From af114a48f3c6a0438fca88d4a7aaaa9298a034dd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 06:40:42 -0400 Subject: [PATCH 429/611] chore(python): add missing import for prerelease testing (#328) Source-Link: https://github.com/googleapis/synthtool/commit/d2871d98e1e767d4ad49a557ff979236d64361a1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/noxfile.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 2185b591844c..50b29ffd2050 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
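One detail worth calling out in the prerelease_deps session above is the regex that recovers package names from the constraints file: it captures the token before "==" on each exact-pin line and skips comments, blanks, and loose specifiers, so only the non-prerelease remainder is reinstalled afterward. A standalone sketch of that parsing step with illustrative constraints content:

    import re

    constraints_text = (
        "# comment lines are skipped\n"
        "google-api-core==1.31.5\n"
        "proto-plus==1.15.0\n"
        "libcst\n"  # no '==' pin, so it is ignored
    )

    deps = [
        match.group(1)
        for match in re.finditer(
            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
        )
    ]
    assert deps == ["google-api-core", "proto-plus"]
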
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 -# created: 2022-06-12T13:11:45.905884945Z + digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 +# created: 2022-06-12T16:09:31.61859086Z diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 8de098db151e..85ebf7d804a7 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -19,6 +19,7 @@ from __future__ import absolute_import import os import pathlib +import re import shutil import warnings From 96c82a14e23cb8004efd9dbe000ae99c6fd7c1fd Mon Sep 17 00:00:00 2001 From: Juan Lara Date: Thu, 16 Jun 2022 19:32:20 +0000 Subject: [PATCH 430/611] docs(samples): add samples for IN, NOT_IN, and != operators. (#312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: add samples for IN, NOT_IN, and != operators. * Update samples/snippets_test.py Co-authored-by: Mariatta Wijaya * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add an index.yaml file with required indexes * Fix linting errors. Remove unused libraries. * Opt into build specific gcloud projects. * Typo in code comment * Add a test fixture to set up a required index. * Remove snippets that require an index. Will add them in another PR. * Fix linting error * Create a snippets subdirectory under samples * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Use build-specific gcloud projects for noxfile. * Restore noxfile.py to owlbot version. Add noxfile_config.py Co-authored-by: Mariatta Wijaya Co-authored-by: Owl Bot --- .../samples/snippets/noxfile.py | 312 ++++++++++++++++++ .../samples/snippets/noxfile_config.py | 42 +++ .../samples/snippets/requirements-test.txt | 4 + .../samples/snippets/requirements.txt | 1 + .../samples/snippets/snippets.py | 79 +++++ .../samples/snippets/snippets_test.py | 68 ++++ 6 files changed, 506 insertions(+) create mode 100644 packages/google-cloud-datastore/samples/snippets/noxfile.py create mode 100644 packages/google-cloud-datastore/samples/snippets/noxfile_config.py create mode 100644 packages/google-cloud-datastore/samples/snippets/requirements-test.txt create mode 100644 packages/google-cloud-datastore/samples/snippets/requirements.txt create mode 100644 packages/google-cloud-datastore/samples/snippets/snippets.py create mode 100644 packages/google-cloud-datastore/samples/snippets/snippets_test.py diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py new file mode 100644 index 000000000000..38bb0a572b81 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -0,0 +1,312 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. 
+ """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + """Runs py.test for a particular project.""" + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile_config.py b/packages/google-cloud-datastore/samples/snippets/noxfile_config.py new file mode 100644 index 000000000000..7bf43541d126 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. 
+ "envs": {}, +} diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt new file mode 100644 index 000000000000..f15210891bcc --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -0,0 +1,4 @@ +backoff==1.11.1; python_version < "3.7" +backoff==2.0.0; python_version >= "3.7" +pytest==7.0.1 +flaky==3.7.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt new file mode 100644 index 000000000000..d5e16c3771aa --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -0,0 +1 @@ +google-cloud-datastore==2.7.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/snippets.py b/packages/google-cloud-datastore/samples/snippets/snippets.py new file mode 100644 index 000000000000..40dd5aad5951 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/snippets.py @@ -0,0 +1,79 @@ +# Copyright 2022 Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +from pprint import pprint + +from google.cloud import datastore # noqa: I100 + + +def _preamble(): + # [START datastore_size_coloration_query] + from google.cloud import datastore + + # For help authenticating your client, visit + # https://cloud.google.com/docs/authentication/getting-started + client = datastore.Client() + + # [END datastore_size_coloration_query] + assert client is not None + + +def in_query(client): + # [START datastore_in_query] + query = client.query(kind="Task") + query.add_filter("tag", "IN", ["learn", "study"]) + # [END datastore_in_query] + + return list(query.fetch()) + + +def not_equals_query(client): + # [START datastore_not_equals_query] + query = client.query(kind="Task") + query.add_filter("category", "!=", "work") + # [END datastore_not_equals_query] + + return list(query.fetch()) + + +def not_in_query(client): + # [START datastore_not_in_query] + query = client.query(kind="Task") + query.add_filter("category", "NOT_IN", ["work", "chores", "school"]) + # [END datastore_not_in_query] + + return list(query.fetch()) + + +def main(project_id): + client = datastore.Client(project_id) + + for name, function in globals().items(): + if name in ("main", "_preamble", "defaultdict") or not callable(function): + continue + + print(name) + pprint(function(client)) + print("\n-----------------\n") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Demonstrates datastore API operations." 
+ ) + parser.add_argument("project_id", help="Your cloud project ID.") + + args = parser.parse_args() + + main(args.project_id) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets_test.py b/packages/google-cloud-datastore/samples/snippets/snippets_test.py new file mode 100644 index 000000000000..27607c07c12a --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/snippets_test.py @@ -0,0 +1,68 @@ +# Copyright 2022 Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import backoff +from google.cloud import datastore + + +import pytest + +import snippets + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] + + +class CleanupClient(datastore.Client): + def __init__(self, *args, **kwargs): + super(CleanupClient, self).__init__(*args, **kwargs) + self.entities_to_delete = [] + self.keys_to_delete = [] + + def cleanup(self): + with self.batch(): + self.delete_multi( + list(set([x.key for x in self.entities_to_delete if x])) + + list(set(self.keys_to_delete)) + ) + + +@pytest.fixture +def client(): + client = CleanupClient(PROJECT) + yield client + client.cleanup() + + +@pytest.mark.flaky +class TestDatastoreSnippets: + # These tests mostly just test the absence of exceptions. + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_in_query(self, client): + tasks = snippets.in_query(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_not_equals_query(self, client): + tasks = snippets.not_equals_query(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_not_in_query(self, client): + tasks = snippets.not_in_query(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None From cf708066722c101a40f154673e85158d2f23c9a1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 16 Jun 2022 12:42:32 -0700 Subject: [PATCH 431/611] chore(main): release 2.7.1 (#329) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index a7b39f290ca6..cad7baa48c68 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.7.1](https://github.com/googleapis/python-datastore/compare/v2.7.0...v2.7.1) (2022-06-16) + + +### Documentation + +* **samples:** add samples for IN, NOT_IN, and != operators. 
([#312](https://github.com/googleapis/python-datastore/issues/312)) ([4170325](https://github.com/googleapis/python-datastore/commit/41703250a37cf26c31aba4828102a5da8357fb76)) + ## [2.7.0](https://github.com/googleapis/python-datastore/compare/v2.6.2...v2.7.0) (2022-06-09) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index d962613e0244..a97caa56255a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.7.0" +__version__ = "2.7.1" From 537a0fe8c06d90e5fdcf6c89caf1e144696097a7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 10 Jul 2022 06:52:08 -0400 Subject: [PATCH 432/611] fix: require python 3.7+ (#332) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): drop python 3.6 Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * add api_description to .repo-metadata.json * require python 3.7+ in setup.py * remove python 3.6 sample configs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove python 3.6 from noxfile * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove python 3.6 from noxfile * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * ci: update replacement in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.6/common.cfg | 40 -------- .../.kokoro/samples/python3.6/continuous.cfg | 7 -- .../samples/python3.6/periodic-head.cfg | 11 --- .../.kokoro/samples/python3.6/periodic.cfg | 6 -- .../.kokoro/samples/python3.6/presubmit.cfg | 6 -- .../.kokoro/test-samples-impl.sh | 4 +- .../.repo-metadata.json | 3 +- .../google-cloud-datastore/CONTRIBUTING.rst | 6 +- packages/google-cloud-datastore/README.rst | 75 ++++++++------- packages/google-cloud-datastore/noxfile.py | 92 +++++++++++-------- packages/google-cloud-datastore/owlbot.py | 8 +- .../samples/snippets/noxfile.py | 2 +- .../templates/install_deps.tmpl.rst | 2 +- packages/google-cloud-datastore/setup.py | 3 +- 16 files changed, 105 insertions(+), 166 deletions(-) delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml 
b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 50b29ffd2050..1ce608523524 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 -# created: 2022-06-12T16:09:31.61859086Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index e5be6edbd54d..5531b0141297 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index c726211bf114..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af1499e5..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh index 8a324c9c7bc6..2c6500cae0b9 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. 
diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index 44c2f180f25b..21be1c4496a0 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -12,5 +12,6 @@ "api_id": "datastore.googleapis.com", "default_version": "v1", "codeowner_team": "@googleapis/cloud-native-db-dpes", - "api_shortname": "datastore" + "api_shortname": "datastore", + "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries." } diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 37893012515c..5018565cf6d0 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -246,13 +246,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -264,7 +262,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-datastore/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 2315c84a893f..2a39517c5c87 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -1,26 +1,26 @@ -Python Client for Google Cloud Datastore -======================================== +Python Client for Google Cloud Datastore API +============================================ -|GA| |pypi| |versions| +|stable| |pypi| |versions| -`Google Cloud Datastore API`_ is a fully managed, schemaless database for -storing non-relational data. Cloud Datastore automatically scales with your -users and supports ACID transactions, high availability of reads and writes, -strong consistency for reads and ancestor queries, and eventual consistency for -all other queries. +`Google Cloud Datastore API`_: is a fully managed, schemaless database for +storing non-relational data. Cloud Datastore automatically scales with +your users and supports ACID transactions, high availability of reads and +writes, strong consistency for reads and ancestor queries, and eventual +consistency for all other queries. - `Client Library Documentation`_ - `Product Documentation`_ -.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. 
|stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ -.. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs -.. _Product Documentation: https://cloud.google.com/datastore/docs +.. _Google Cloud Datastore API: https://cloud.google.com/datastore .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest +.. _Product Documentation: https://cloud.google.com/datastore Quick Start ----------- @@ -51,11 +51,26 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6 +Our client libraries are compatible with all current [active](https://devguide.python.org/devcycle/#in-development-main-branch) and [maintenance](https://devguide.python.org/devcycle/#maintenance-branches) versions of +Python. + +Python >= 3.7 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an [end-of-life](https://devguide.python.org/devcycle/#end-of-life-branches) +version of Python, we recommend that you update as soon as possible to an actively supported version. -The last version of this library compatible with Python 2.7 is google-cloud-datastore==1.15.3 Mac/Linux ^^^^^^^^^ @@ -78,31 +93,15 @@ Windows \Scripts\activate \Scripts\pip.exe install google-cloud-datastore - -Example Usage -~~~~~~~~~~~~~ - -.. code:: python - - from google.cloud import datastore - # Create, populate and persist an entity with keyID=1234 - client = datastore.Client() - key = client.key('EntityKind', 1234) - entity = datastore.Entity(key=key) - entity.update({ - 'foo': u'bar', - 'baz': 1337, - 'qux': False, - }) - client.put(entity) - # Then get by key for this entity - result = client.get(key) - print(result) - Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Google Cloud Datastore - API to see other available methods on the client. -- Read the `Product documentation`_ to learn +- Read the `Client Library Documentation`_ for Google Cloud Datastore API + to see other available methods on the client. +- Read the `Google Cloud Datastore API Product documentation`_ to learn more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Cloud Datastore API Product documentation: https://cloud.google.com/datastore +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 85ebf7d804a7..a27a6a659ba8 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -31,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -311,13 +311,8 @@ def docs(session): ) -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def doctests(session): - # Doctests run against Python 3.6 only. - # It is difficult to make doctests run against both Python 2 and Python 3 - # because they test string output equivalence, which is difficult to - # make match (e.g. unicode literals starting with "u"). - # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest", "sphinx", "google-cloud-testutils") @@ -366,28 +361,15 @@ def docfx(session): def prerelease_deps(session): """Run all tests with prerelease versions of dependencies installed.""" - prerel_deps = [ - "protobuf", - "googleapis-common-protos", - "google-auth", - "grpcio", - "grpcio-status", - "google-api-core", - "proto-plus", - # dependencies of google-auth - "cryptography", - "pyasn1", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = ["requests"] - session.install(*other_deps) - + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python # version, the first version we test with in the unit tests sessions has a @@ -401,19 +383,44 @@ def prerelease_deps(session): constraints_text = constraints_file.read() # Ignore leading whitespace and comment lines. - deps = [ + constraints_deps = [ match.group(1) for match in re.finditer( r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE ) ] - # Don't overwrite prerelease packages. - deps = [dep for dep in deps if dep not in prerel_deps] - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. 
- session.install(*deps) - session.install("--no-deps", "-e", ".[all]") + session.install(*constraints_deps) + + if os.path.exists("samples/snippets/requirements.txt"): + session.install("-r", "samples/snippets/requirements.txt") + + if os.path.exists("samples/snippets/requirements-test.txt"): + session.install("-r", "samples/snippets/requirements-test.txt") + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) # Print out prerelease package versions session.run( @@ -422,5 +429,16 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("py.test", "tests/unit") - session.run("py.test", "tests/system") - session.run("py.test", "samples/snippets") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): + session.run("py.test", "tests/system") + + snippets_test_path = os.path.join("samples", "snippets") + + # Only run samples tests if found. + if os.path.exists(snippets_test_path): + session.run("py.test", "samples/snippets") diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 79c30cac3d1d..ca00e0ca0e5e 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -98,7 +98,6 @@ def get_staging_dirs( templated_files = common.py_library( microgenerator=True, split_system_tests=True, - unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], cov_level=100, ) s.move( @@ -181,13 +180,8 @@ def system(session, disable_grpc): def docfx\(session\): """, """\ -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def doctests(session): - # Doctests run against Python 3.6 only. - # It is difficult to make doctests run against both Python 2 and Python 3 - # because they test string output equivalence, which is difficult to - # make match (e.g. unicode literals starting with "u"). - # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest", "sphinx", "google-cloud-testutils") diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index 38bb0a572b81..5fcb9d7461f2 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d649890d7..6f069c6c87a5 100644 --- a/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-datastore/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index cd203ebe6fb1..b966d7a7b5e8 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -85,7 +85,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -99,7 +98,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.6", + python_requires=">=3.7", scripts=[ "scripts/fixup_datastore_v1_keywords.py", "scripts/fixup_datastore_admin_v1_keywords.py", From 796f189e2062d07f82ece4c7e036406bed4a9693 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sun, 10 Jul 2022 10:21:08 -0700 Subject: [PATCH 433/611] chore(main): release 2.7.2 (#335) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index cad7baa48c68..cebb6a807353 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.7.2](https://github.com/googleapis/python-datastore/compare/v2.7.1...v2.7.2) (2022-07-10) + + +### Bug Fixes + +* require python 3.7+ ([#332](https://github.com/googleapis/python-datastore/issues/332)) ([a9c513f](https://github.com/googleapis/python-datastore/commit/a9c513f0c0d9d843406cfea5ecd2cdd169030179)) + ## [2.7.1](https://github.com/googleapis/python-datastore/compare/v2.7.0...v2.7.1) (2022-06-16) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index a97caa56255a..52126b8e5a38 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.7.1" +__version__ = "2.7.2" From 0d6b7a96df1c2eed2f981cb76d92e81d57012d50 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Jul 2022 22:44:12 -0400 Subject: [PATCH 434/611] chore(python): allow client documentation to be customized in README (#337) Source-Link: https://github.com/googleapis/synthtool/commit/95d9289ac3dc1ca2edae06619c82fe7a24d555f1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/README.rst | 8 ++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 1ce608523524..58fcbeeed649 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c -# created: 2022-07-05T18:31:20.838186805Z + digest: sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 +# created: 2022-07-14T01:58:16.015625351Z diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 2a39517c5c87..392bfd8c0927 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -59,18 +59,22 @@ Code samples and snippets live in the `samples/` folder. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Our client libraries are compatible with all current [active](https://devguide.python.org/devcycle/#in-development-main-branch) and [maintenance](https://devguide.python.org/devcycle/#maintenance-branches) versions of +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. Python >= 3.7 +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Python <= 3.6 -If you are using an [end-of-life](https://devguide.python.org/devcycle/#end-of-life-branches) +If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. +.. 
_end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches Mac/Linux ^^^^^^^^^ From ec6e08e4ca743ea0dd8e31a60d38fc56a8678ea7 Mon Sep 17 00:00:00 2001 From: yixiaoshen Date: Fri, 15 Jul 2022 12:29:12 -0700 Subject: [PATCH 435/611] feat: Add read_time support for get and query (#334) * feat: add read_time support for get and query * fix timezone issue in read_time unit tests * fix sphinx check error Co-authored-by: Mariatta Wijaya --- .../google/cloud/datastore/client.py | 34 +++++- .../google/cloud/datastore/helpers.py | 28 +++-- .../google/cloud/datastore/query.py | 23 +++- .../google/cloud/datastore/transaction.py | 24 +++- .../tests/system/test_read_consistency.py | 112 ++++++++++++++++++ .../tests/unit/test_client.py | 46 +++++++ .../tests/unit/test_helpers.py | 63 ++++++++-- .../tests/unit/test_query.py | 27 ++++- .../tests/unit/test_transaction.py | 61 +++++++++- 9 files changed, 378 insertions(+), 40 deletions(-) create mode 100644 packages/google-cloud-datastore/tests/system/test_read_consistency.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 03829ce0550a..212ba1d4cbff 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -123,6 +123,7 @@ def _extended_lookup( transaction_id=None, retry=None, timeout=None, + read_time=None, ): """Repeat lookup until all keys found (unless stop requested). @@ -157,7 +158,7 @@ def _extended_lookup( :type transaction_id: str :param transaction_id: If passed, make the request in the scope of the given transaction. Incompatible with - ``eventual==True``. + ``eventual==True`` or ``read_time``. :type retry: :class:`google.api_core.retry.Retry` :param retry: @@ -170,6 +171,12 @@ def _extended_lookup( Note that if ``retry`` is specified, the timeout applies to each individual attempt. + :type read_time: datetime + :param read_time: + (Optional) Read time to use for read consistency. Incompatible with + ``eventual==True`` or ``transaction_id``. + This feature is in private preview. + :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not null or @@ -186,7 +193,7 @@ def _extended_lookup( results = [] loop_num = 0 - read_options = helpers.get_read_options(eventual, transaction_id) + read_options = helpers.get_read_options(eventual, transaction_id, read_time) while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( @@ -401,6 +408,7 @@ def get( eventual=False, retry=None, timeout=None, + read_time=None, ): """Retrieve an entity from a single key (if it exists). @@ -430,7 +438,8 @@ def get( :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, but cannot - be used inside a transaction or will raise ValueError. + be used inside a transaction or with read_time, or will + raise ValueError. :type retry: :class:`google.api_core.retry.Retry` :param retry: @@ -443,10 +452,16 @@ def get( Note that if ``retry`` is specified, the timeout applies to each individual attempt. + :type read_time: datetime + :param read_time: Read the entity from the specified time (may be null). + Cannot be used with eventual consistency or inside a + transaction, or will raise ValueError. This feature is in private preview. 
+ :rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType`` :returns: The requested entity if it exists. - :raises: :class:`ValueError` if eventual is True and in a transaction. + :raises: :class:`ValueError` if more than one of ``eventual==True``, + ``transaction``, and ``read_time`` is specified. """ entities = self.get_multi( keys=[key], @@ -456,6 +471,7 @@ def get( eventual=eventual, retry=retry, timeout=timeout, + read_time=read_time, ) if entities: return entities[0] @@ -469,6 +485,7 @@ def get_multi( eventual=False, retry=None, timeout=None, + read_time=None, ): """Retrieve entities, along with their attributes. @@ -506,11 +523,15 @@ def get_multi( Note that if ``retry`` is specified, the timeout applies to each individual attempt. + :type read_time: datetime + :param read_time: (Optional) Read time to use for read consistency. This feature is in private preview. + :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. :raises: :class:`ValueError` if one or more of ``keys`` has a project - which does not match our project. - :raises: :class:`ValueError` if eventual is True and in a transaction. + which does not match our project; or if more than one of + ``eventual==True``, ``transaction``, and ``read_time`` is + specified. """ if not keys: return [] @@ -533,6 +554,7 @@ def get_multi( transaction_id=transaction and transaction.id, retry=retry, timeout=timeout, + read_time=read_time, ) if missing is not None: diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index f976070e9833..123f356efbcf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -29,6 +29,7 @@ from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key +from google.protobuf import timestamp_pb2 def _get_meaning(value_pb, is_list=False): @@ -230,7 +231,7 @@ def entity_to_protobuf(entity): return entity_pb -def get_read_options(eventual, transaction_id): +def get_read_options(eventual, transaction_id, read_time=None): """Validate rules for read options, and assign to the request. Helper method for ``lookup()`` and ``run_query``. @@ -242,21 +243,34 @@ def get_read_options(eventual, transaction_id): :type transaction_id: bytes :param transaction_id: A transaction identifier (may be null). + :type read_time: datetime + :param read_time: Read data from the specified time (may be null). This feature is in private preview. + :rtype: :class:`.datastore_pb2.ReadOptions` :returns: The read options corresponding to the inputs. - :raises: :class:`ValueError` if ``eventual`` is ``True`` and the - ``transaction_id`` is not ``None``. + :raises: :class:`ValueError` if more than one of ``eventual==True``, + ``transaction``, and ``read_time`` is specified. 
""" if transaction_id is None: if eventual: - return datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - ) + if read_time is not None: + raise ValueError("eventual must be False when read_time is specified") + else: + return datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + ) else: - return datastore_pb2.ReadOptions() + if read_time is None: + return datastore_pb2.ReadOptions() + else: + read_time_pb = timestamp_pb2.Timestamp() + read_time_pb.FromDatetime(read_time) + return datastore_pb2.ReadOptions(read_time=read_time_pb) else: if eventual: raise ValueError("eventual must be False when in a transaction") + elif read_time is not None: + raise ValueError("transaction and read_time are mutual exclusive") else: return datastore_pb2.ReadOptions(transaction=transaction_id) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 57c19205f7af..5907f3c1066c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -357,6 +357,7 @@ def fetch( eventual=False, retry=None, timeout=None, + read_time=None, ): """Execute the Query; return an iterator for the matching entities. @@ -412,7 +413,8 @@ def fetch( :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, but cannot be used inside a transaction or - will raise ValueError. + with read_time, otherwise will raise + ValueError. :type retry: :class:`google.api_core.retry.Retry` :param retry: @@ -425,6 +427,11 @@ def fetch( Note that if ``retry`` is specified, the timeout applies to each individual attempt. + :type read_time: datetime + :param read_time: + (Optional) use read_time read consistency, cannot be used inside a + transaction or with eventual consistency, or will raise ValueError. + :rtype: :class:`Iterator` :returns: The iterator for the query. """ @@ -441,6 +448,7 @@ def fetch( eventual=eventual, retry=retry, timeout=timeout, + read_time=read_time, ) @@ -473,7 +481,7 @@ class Iterator(page_iterator.Iterator): :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, but cannot be used inside a transaction or - will raise ValueError. + with read_time, otherwise will raise ValueError. :type retry: :class:`google.api_core.retry.Retry` :param retry: @@ -485,6 +493,11 @@ class Iterator(page_iterator.Iterator): Time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + + :type read_time: datetime + :param read_time: (Optional) Runs the query with read time consistency. + Cannot be used with eventual consistency or inside a + transaction, otherwise will raise ValueError. This feature is in private preview. """ next_page_token = None @@ -500,6 +513,7 @@ def __init__( eventual=False, retry=None, timeout=None, + read_time=None, ): super(Iterator, self).__init__( client=client, @@ -513,6 +527,7 @@ def __init__( self._eventual = eventual self._retry = retry self._timeout = timeout + self._read_time = read_time # The attributes below will change over the life of the iterator. 
self._more_results = True self._skipped_results = 0 @@ -593,7 +608,9 @@ def _next_page(self): transaction_id = None else: transaction_id = transaction.id - read_options = helpers.get_read_options(self._eventual, transaction_id) + read_options = helpers.get_read_options( + self._eventual, transaction_id, self._read_time + ) partition_id = entity_pb2.PartitionId( project_id=self._query.project, namespace_id=self._query.namespace diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 5da64198dc5a..dc18e64daecc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -16,6 +16,7 @@ from google.cloud.datastore.batch import Batch from google.cloud.datastore_v1.types import TransactionOptions +from google.protobuf import timestamp_pb2 def _make_retry_timeout_kwargs(retry, timeout): @@ -141,18 +142,35 @@ class Transaction(Batch): :type read_only: bool :param read_only: indicates the transaction is read only. + + :type read_time: datetime + :param read_time: (Optional) Time at which the transaction reads entities. + Only allowed when ``read_only=True``. This feature is in private preview. + + :raises: :class:`ValueError` if read_time is specified when + ``read_only=False``. """ _status = None - def __init__(self, client, read_only=False): + def __init__(self, client, read_only=False, read_time=None): super(Transaction, self).__init__(client) self._id = None if read_only: - options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) + if read_time is not None: + read_time_pb = timestamp_pb2.Timestamp() + read_time_pb.FromDatetime(read_time) + options = TransactionOptions( + read_only=TransactionOptions.ReadOnly(read_time=read_time_pb) + ) + else: + options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) else: - options = TransactionOptions() + if read_time is not None: + raise ValueError("read_time is only allowed in a read-only transaction.") + else: + options = TransactionOptions() self._options = options diff --git a/packages/google-cloud-datastore/tests/system/test_read_consistency.py b/packages/google-cloud-datastore/tests/system/test_read_consistency.py new file mode 100644 index 000000000000..d65b935678a5 --- /dev/null +++ b/packages/google-cloud-datastore/tests/system/test_read_consistency.py @@ -0,0 +1,112 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
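The new system test file below exercises this surface end to end. As orientation before the test bodies, here is an editorial sketch (not part of the patch) of the user-facing API the changes above add; the client construction, kind, and key name are illustrative placeholders:

```python
# Sketch only: the read_time surface added by this patch series.
# Assumes a configured Datastore client; "Task"/"sampletask" are hypothetical.
from datetime import datetime, timedelta, timezone

from google.cloud import datastore

client = datastore.Client()
key = client.key("Task", "sampletask")

# A snapshot time slightly in the past.
read_time = datetime.now(timezone.utc) - timedelta(seconds=120)

entity_then = client.get(key, read_time=read_time)  # snapshot read
entity_now = client.get(key)                        # strong consistency (default)

# read_time may also scope an entire read-only transaction:
with client.transaction(read_only=True, read_time=read_time):
    stale = client.get(key)

# Per get_read_options() above, combining the options raises ValueError,
# e.g. client.get(key, eventual=True, read_time=read_time).
```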
+ +import time + +from datetime import datetime + +from google.cloud import datastore + + +def _parent_key(datastore_client): + return datastore_client.key("Blog", "PizzaMan") + + +def _put_entity(datastore_client, entity_id): + key = datastore_client.key( + "read_time_test", entity_id, parent=_parent_key(datastore_client) + ) + entity = datastore.Entity(key=key) + entity["field"] = "old_value" + datastore_client.put(entity) + return entity + + +def test_get_w_read_time(datastore_client, entities_to_delete): + entity = _put_entity(datastore_client, 1) + + entities_to_delete.append(entity) + + # Add some sleep to accommodate server & client clock discrepancy. + time.sleep(1) + read_time = datetime.now() + time.sleep(1) + + entity["field"] = "new_value" + datastore_client.put(entity) + + # Get without read_time. + retrieved_entity = datastore_client.get(entity.key) + assert retrieved_entity["field"] == "new_value" + + # Directly specify read_time in get request. + retrieved_entity_from_read_time = datastore_client.get( + entity.key, read_time=read_time + ) + assert retrieved_entity_from_read_time["field"] == "old_value" + + # Use read_time in a read_only transaction. + with datastore_client.transaction(read_only=True, read_time=read_time): + retrieved_entity_from_xact = datastore_client.get(entity.key) + assert retrieved_entity_from_xact["field"] == "old_value" + + +def test_query_w_read_time(datastore_client, entities_to_delete): + entity0 = _put_entity(datastore_client, 1) + entity1 = _put_entity(datastore_client, 2) + entity2 = _put_entity(datastore_client, 3) + + entities_to_delete.append(entity0) + entities_to_delete.append(entity1) + entities_to_delete.append(entity2) + + # Add some sleep to accommodate server & client clock discrepancy. + time.sleep(1) + read_time = datetime.now() + time.sleep(1) + + entity2["field"] = "new_value" + datastore_client.put(entity2) + + query = datastore_client.query( + kind="read_time_test", ancestor=_parent_key(datastore_client) + ) + query = query.add_filter("field", "=", "old_value") + + # Query without read_time. + iterator = query.fetch() + page = next(iterator.pages) + query_results = list(page) + assert len(query_results) == 2 + assert query_results[0].key == entity0.key + assert query_results[1].key == entity1.key + + # Directly specify read_time in query. + iterator_read_time = query.fetch(read_time=read_time) + page_read_time = next(iterator_read_time.pages) + query_results_read_time = list(page_read_time) + assert len(query_results_read_time) == 3 + assert query_results_read_time[0].key == entity0.key + assert query_results_read_time[1].key == entity1.key + assert query_results_read_time[2].key == entity2.key + + # Run the query in a read_only transaction with read_time.
+ with datastore_client.transaction(read_only=True, read_time=read_time): + iterator_from_xact = query.fetch() + page_from_xact = next(iterator_from_xact.pages) + query_results_from_xact = list(page_from_xact) + assert len(query_results_from_xact) == 3 + assert query_results_from_xact[0].key == entity0.key + assert query_results_from_xact[1].key == entity1.key + assert query_results_from_xact[2].key == entity2.key diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 51cddb6a9fd7..2a15677a9646 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -367,6 +367,7 @@ def test_client_get_miss(): eventual=False, retry=None, timeout=None, + read_time=None, ) @@ -389,6 +390,7 @@ def test_client_get_hit(): eventual=False, retry=None, timeout=None, + read_time=None, ) @@ -658,6 +660,50 @@ def test_client_get_multi_hit_w_transaction(): ) +def test_client_get_multi_hit_w_read_time(): + from datetime import datetime + + from google.cloud.datastore.key import Key + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.protobuf.timestamp_pb2 import Timestamp + + read_time = datetime.utcfromtimestamp(1641058200.123456) + read_time_pb = Timestamp(seconds=1641058200, nanos=123456000) + kind = "Kind" + id_ = 1234 + path = [{"kind": kind, "id": id_}] + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + + # Make a connection to return the entity pb. + creds = _make_credentials() + client = _make_client(credentials=creds) + lookup_response = _make_lookup_response(results=[entity_pb]) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key = Key(kind, id_, project=PROJECT) + (result,) = client.get_multi([key], read_time=read_time) + new_key = result.key + + # Check the returned value is as expected. 
+ assert new_key is not key + assert new_key.project == PROJECT + assert new_key.path == path + assert list(result) == ["foo"] + assert result["foo"] == "Foo" + + read_options = datastore_pb2.ReadOptions(read_time=read_time_pb) + ds_api.lookup.assert_called_once_with( + request={ + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": read_options, + } + ) + + def test_client_get_multi_hit_multiple_keys_same_project(): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.key import Key diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index a8477f2de1fa..cf626ee36822 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -495,43 +495,84 @@ def test_w_nothing_in_pb(): key_from_protobuf(pb) -def test__get_read_options_w_eventual_w_txn(): +def test__get_read_options_w_eventual_w_txn_wo_read_time(): from google.cloud.datastore.helpers import get_read_options with pytest.raises(ValueError): - get_read_options(True, b"123") + get_read_options(True, b"123", None) -def test__get_read_options_w_eventual_wo_txn(): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 +def test__get_read_options_w_eventual_wo_txn_wo_read_time(): from google.cloud.datastore.helpers import get_read_options + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - read_options = get_read_options(True, None) + read_options = get_read_options(True, None, None) expected = datastore_pb2.ReadOptions( read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL ) assert read_options == expected -def test__get_read_options_w_default_w_txn(): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 +def test__get_read_options_w_eventual_w_txn_w_read_time(): + from datetime import datetime + + from google.cloud.datastore.helpers import get_read_options + + with pytest.raises(ValueError): + get_read_options(True, b"123", datetime(2022, 1, 1, 17, 30, 0, 123456)) + + +def test__get_read_options_w_eventual_wo_txn_w_read_time(): + from datetime import datetime + + from google.cloud.datastore.helpers import get_read_options + + with pytest.raises(ValueError): + get_read_options(True, None, datetime(2022, 1, 1, 17, 30, 0, 123456)) + + +def test__get_read_options_w_default_w_txn_wo_read_time(): from google.cloud.datastore.helpers import get_read_options + from google.cloud.datastore_v1.types import datastore as datastore_pb2 txn_id = b"123abc-easy-as" - read_options = get_read_options(False, txn_id) + read_options = get_read_options(False, txn_id, None) expected = datastore_pb2.ReadOptions(transaction=txn_id) assert read_options == expected -def test__get_read_options_w_default_wo_txn(): - from google.cloud.datastore_v1.types import datastore as datastore_pb2 +def test__get_read_options_w_default_wo_txn_wo_read_time(): from google.cloud.datastore.helpers import get_read_options + from google.cloud.datastore_v1.types import datastore as datastore_pb2 - read_options = get_read_options(False, None) + read_options = get_read_options(False, None, None) expected = datastore_pb2.ReadOptions() assert read_options == expected +def test__get_read_options_w_default_w_txn_w_read_time(): + from datetime import datetime + + from google.cloud.datastore.helpers import get_read_options + + with pytest.raises(ValueError): + get_read_options(False, b"123", datetime(2022,
1, 1, 17, 30, 0, 123456)) + + +def test__get_read_options_w_default_wo_txn_w_read_time(): + from datetime import datetime + + from google.cloud.datastore.helpers import get_read_options + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.protobuf.timestamp_pb2 import Timestamp + + read_time = datetime.utcfromtimestamp(1641058200.123456) + read_time_pb = Timestamp(seconds=1641058200, nanos=123456000) + read_options = get_read_options(False, None, read_time) + expected = datastore_pb2.ReadOptions(read_time=read_time_pb) + assert read_options == expected + + def test__pb_attr_value_w_datetime_naive(): import calendar import datetime diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 1f250f4636bb..b473a8c71b1c 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import datetime import mock import pytest @@ -555,12 +556,13 @@ def test_iterator__process_query_results_bad_enum(): iterator._process_query_results(response_pb) -def _next_page_helper(txn_id=None, retry=None, timeout=None): +def _next_page_helper(txn_id=None, retry=None, timeout=None, read_time=None): from google.api_core import page_iterator + from google.cloud.datastore.query import Query from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore_v1.types import query as query_pb2 - from google.cloud.datastore.query import Query + from google.protobuf.timestamp_pb2 import Timestamp more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED result = _make_query_response([], b"", more_enum, 0) @@ -581,7 +583,11 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None): if timeout is not None: kwargs["timeout"] = timeout - iterator = _make_iterator(query, client, **kwargs) + it_kwargs = kwargs.copy() + if read_time is not None: + it_kwargs["read_time"] = read_time + + iterator = _make_iterator(query, client, **it_kwargs) page = iterator._next_page() @@ -589,10 +595,14 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None): assert page._parent is iterator partition_id = entity_pb2.PartitionId(project_id=project) - if txn_id is None: - read_options = datastore_pb2.ReadOptions() - else: + if txn_id is not None: read_options = datastore_pb2.ReadOptions(transaction=txn_id) + elif read_time is not None: + read_time_pb = Timestamp() + read_time_pb.FromDatetime(read_time) + read_options = datastore_pb2.ReadOptions(read_time=read_time_pb) + else: + read_options = datastore_pb2.ReadOptions() empty_query = query_pb2.Query() ds_api.run_query.assert_called_once_with( request={ @@ -622,6 +632,11 @@ def test_iterator__next_page_in_transaction(): _next_page_helper(txn_id) +def test_iterator__next_page_w_read_time(): + read_time = datetime.datetime.utcfromtimestamp(1641058200.123456) + _next_page_helper(read_time=read_time) + + def test_iterator__next_page_no_more(): from google.cloud.datastore.query import Query diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 3e78a6a31ac3..178bb4f17277 100644 --- 
a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -44,6 +44,34 @@ def test_transaction_constructor_read_only(): assert xact._options == options +def test_transaction_constructor_w_read_time(): + from datetime import datetime + + project = "PROJECT" + id_ = 850302 + read_time = datetime.utcfromtimestamp(1641058200.123456) + ds_api = _make_datastore_api(xact=id_) + client = _Client(project, datastore_api=ds_api) + options = _make_options(read_only=True, read_time=read_time) + + xact = _make_transaction(client, read_only=True, read_time=read_time) + + assert xact._options == options + + +def test_transaction_constructor_read_write_w_read_time(): + from datetime import datetime + + project = "PROJECT" + id_ = 850302 + read_time = datetime.utcfromtimestamp(1641058200.123456) + ds_api = _make_datastore_api(xact=id_) + client = _Client(project, datastore_api=ds_api) + + with pytest.raises(ValueError): + _make_transaction(client, read_only=False, read_time=read_time) + + def test_transaction_current(): from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -128,6 +156,24 @@ def test_transaction_begin_w_readonly(): ds_api.begin_transaction.assert_called_once_with(request=expected_request) +def test_transaction_begin_w_read_time(): + from datetime import datetime + + project = "PROJECT" + id_ = 889 + read_time = datetime.utcfromtimestamp(1641058200.123456) + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api) + xact = _make_transaction(client, read_only=True, read_time=read_time) + + xact.begin() + + assert xact.id == id_ + + expected_request = _make_begin_request(project, read_only=True, read_time=read_time) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) + + def test_transaction_begin_w_retry_w_timeout(): project = "PROJECT" id_ = 889 @@ -413,13 +459,20 @@ def __exit__(self, *args): self._client._pop_batch() -def _make_options(read_only=False, previous_transaction=None): +def _make_options(read_only=False, previous_transaction=None, read_time=None): from google.cloud.datastore_v1.types import TransactionOptions + from google.protobuf.timestamp_pb2 import Timestamp kw = {} if read_only: - kw["read_only"] = TransactionOptions.ReadOnly() + read_only_kw = {} + if read_time is not None: + read_time_pb = Timestamp() + read_time_pb.FromDatetime(read_time) + read_only_kw["read_time"] = read_time_pb + + kw["read_only"] = TransactionOptions.ReadOnly(**read_only_kw) return TransactionOptions(**kw) @@ -430,8 +483,8 @@ def _make_transaction(client, **kw): return Transaction(client, **kw) -def _make_begin_request(project, read_only=False): - expected_options = _make_options(read_only=read_only) +def _make_begin_request(project, read_only=False, read_time=None): + expected_options = _make_options(read_only=read_only, read_time=read_time) return { "project_id": project, "transaction_options": expected_options, From cc70a7ed435624002216e5428b10e23f5f3d685f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 15 Jul 2022 12:46:40 -0700 Subject: [PATCH 436/611] chore(main): release 2.8.0 (#338) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git 
a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index cebb6a807353..bd6f4be70c41 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.8.0](https://github.com/googleapis/python-datastore/compare/v2.7.2...v2.8.0) (2022-07-15) + + +### Features + +* Add read_time support for get and query ([#334](https://github.com/googleapis/python-datastore/issues/334)) ([58b4b74](https://github.com/googleapis/python-datastore/commit/58b4b74c968a0e50702303ec37377e9c7e05347d)) + ## [2.7.2](https://github.com/googleapis/python-datastore/compare/v2.7.1...v2.7.2) (2022-07-10) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 52126b8e5a38..0a9aecb37574 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.7.2" +__version__ = "2.8.0" From 296461792bace7fddfaaf4b28886776fcb1672e0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 24 Jul 2022 02:33:14 +0200 Subject: [PATCH 437/611] chore(deps): update all dependencies to v4 (#325) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies to v4 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Mariatta Wijaya Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/.github/workflows/mypy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml index f9f07f4de171..c63242630acd 100644 --- a/packages/google-cloud-datastore/.github/workflows/mypy.yml +++ b/packages/google-cloud-datastore/.github/workflows/mypy.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.8" - name: Install nox From 0b588b115b9d21b1528c2b373062d4658c1bd3c6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 29 Jul 2022 14:10:03 -0400 Subject: [PATCH 438/611] chore: resolve issue with prerelease presubmit [autoapprove] (#339) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): fix prerelease session [autoapprove] Source-Link: https://github.com/googleapis/synthtool/commit/1b9ad7694e44ddb4d9844df55ff7af77b51a4435 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 * update replacement in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +-- packages/google-cloud-datastore/noxfile.py | 35 +++++++++++-------- packages/google-cloud-datastore/owlbot.py | 24 +++++-------- 3 files changed, 30 insertions(+), 33 
deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 58fcbeeed649..0eb02fda4c09 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 -# created: 2022-07-14T01:58:16.015625351Z + digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 +# created: 2022-07-25T16:02:49.174178716Z diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index a27a6a659ba8..f98798778199 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -363,7 +363,8 @@ def prerelease_deps(session): # Install all dependencies session.install("-e", ".[all, tests, tracing]") - session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES @@ -392,12 +393,6 @@ def prerelease_deps(session): session.install(*constraints_deps) - if os.path.exists("samples/snippets/requirements.txt"): - session.install("-r", "samples/snippets/requirements.txt") - - if os.path.exists("samples/snippets/requirements-test.txt"): - session.install("-r", "samples/snippets/requirements-test.txt") - prerel_deps = [ "protobuf", # dependency of grpc @@ -434,11 +429,21 @@ def prerelease_deps(session): system_test_folder_path = os.path.join("tests", "system") # Only run system tests if found. - if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): - session.run("py.test", "tests/system") - - snippets_test_path = os.path.join("samples", "snippets") - - # Only run samples tests if found. 
- if os.path.exists(snippets_test_path): - session.run("py.test", "samples/snippets") + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + env=env, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + env=env, + *session.posargs, + ) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index ca00e0ca0e5e..e19fe0ad082c 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -137,26 +137,18 @@ def system(session, disable_grpc): """, ) -assert 1 == s.replace( +assert 2 == s.replace( "noxfile.py", - """\ - system_test_path, -""", - """\ - system_test_path, - env=env, -""", + """system_test_path,\n""", + """system_test_path, + env=env,\n""", ) -assert 1 == s.replace( +assert 2 == s.replace( "noxfile.py", - """\ - system_test_folder_path, -""", - """\ - system_test_folder_path, - env=env, -""", + """system_test_folder_path,\n""", + """system_test_folder_path, + env=env,\n""", ) # Add nox session to exercise doctests From d58a2bee3668a159b0f9c871de6597d2711d0a6c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 15:00:11 +0200 Subject: [PATCH 439/611] chore(deps): update all dependencies (#340) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert * revert Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 8 ++++---- .../samples/snippets/requirements.txt | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index f15210891bcc..9fdbe021770a 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==1.11.1; python_version < "3.7" -backoff==2.0.0; python_version >= "3.7" -pytest==7.0.1 -flaky==3.7.0 \ No newline at end of file +backoff===1.11.1; python_version < "3.7" +backoff==2.1.2; python_version >= "3.7" +pytest==7.1.2 +flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index d5e16c3771aa..4e48d18693f1 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.7.0 \ No newline at end of file +google-cloud-datastore==2.8.0 \ No newline at end of file From 03b1d81a28b3fdf39e9ec9e4ea456117862d0fd9 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Tue, 9 Aug 2022 13:30:46 -0700 Subject: [PATCH 440/611] docs: Move the schedule_export samples from python-docs-samples (#344) Moved from https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/datastore/schedule-export --- .../snippets/schedule-export/README.md | 5 + .../samples/snippets/schedule-export/main.py | 57 ++++ .../snippets/schedule-export/noxfile.py | 312 ++++++++++++++++++ 
.../schedule-export/noxfile_config.py | 42 +++ .../schedule-export/requirements-test.txt | 1 + .../snippets/schedule-export/requirements.txt | 1 + .../schedule-export/schedule_export_test.py | 73 ++++ 7 files changed, 491 insertions(+) create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/README.md create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/main.py create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile_config.py create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt create mode 100644 packages/google-cloud-datastore/samples/snippets/schedule-export/schedule_export_test.py diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/README.md b/packages/google-cloud-datastore/samples/snippets/schedule-export/README.md new file mode 100644 index 000000000000..a8501cddc34b --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/README.md @@ -0,0 +1,5 @@ +# Scheduling Datastore exports with Cloud Functions and Cloud Scheduler + +This sample application demonstrates how to schedule exports of your Datastore entities. To deploy this sample, see: + +[Scheduling exports](https://cloud.google.com/datastore/docs/schedule-export) diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/main.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/main.py new file mode 100644 index 000000000000..5c0eba699d07 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/main.py @@ -0,0 +1,57 @@ +# Copyright 2021 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import os + +from google.cloud import datastore_admin_v1 + +project_id = os.environ.get("GCP_PROJECT") +client = datastore_admin_v1.DatastoreAdminClient() + + +def datastore_export(event, context): + """Triggers a Datastore export from a Cloud Scheduler job. + + Args: + event (dict): event[data] must contain a json object encoded in + base-64. Cloud Scheduler encodes payloads in base-64 by default. + Object must include a 'bucket' value and can include 'kinds' + and 'namespaceIds' values. + context (google.cloud.functions.Context): The Cloud Functions event + metadata. + """ + if "data" in event: + # Triggered via Cloud Scheduler, decode the inner data field of the json payload. + json_data = json.loads(base64.b64decode(event["data"]).decode("utf-8")) + else: + # Otherwise, for instance if triggered via the Cloud Console on a Cloud Function, the event is the data. 
+ json_data = event + + bucket = json_data["bucket"] + entity_filter = datastore_admin_v1.EntityFilter() + + if "kinds" in json_data: + entity_filter.kinds = json_data["kinds"] + + if "namespaceIds" in json_data: + entity_filter.namespace_ids = json_data["namespaceIds"] + + export_request = datastore_admin_v1.ExportEntitiesRequest( + project_id=project_id, output_url_prefix=bucket, entity_filter=entity_filter + ) + operation = client.export_entities(request=export_request) + response = operation.result() + print(response) diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py new file mode 100644 index 000000000000..5fcb9d7461f2 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -0,0 +1,312 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. 
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to ensure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order.
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile_config.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile_config.py new file mode 100644 index 000000000000..7bf43541d126 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. 
+ "envs": {}, +} diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt new file mode 100644 index 000000000000..6a3d7bca6791 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -0,0 +1 @@ +pytest==7.1.2 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt new file mode 100644 index 000000000000..560d22f04b4f --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -0,0 +1 @@ +google-cloud-datastore==2.8.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/schedule_export_test.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/schedule_export_test.py new file mode 100644 index 000000000000..48d9147c9235 --- /dev/null +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/schedule_export_test.py @@ -0,0 +1,73 @@ +# Copyright 2019 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +from unittest.mock import Mock + +import main + +mock_context = Mock() +mock_context.event_id = "617187464135194" +mock_context.timestamp = "2020-04-15T22:09:03.761Z" + + +def test_datastore_export(capsys): + # Test an export without an entity filter + bucket = "gs://my-bucket" + json_string = '{{ "bucket": "{bucket}" }}'.format(bucket=bucket) + + # Encode data like Cloud Scheduler + data = bytes(json_string, "utf-8") + data_encoded = base64.b64encode(data) + event = {"data": data_encoded} + + # Mock the Datastore service + mockDatastore = Mock() + main.client = mockDatastore + + # Call tested function + main.datastore_export(event, mock_context) + out, err = capsys.readouterr() + export_args = mockDatastore.export_entities.call_args[1] + # Assert request includes test values + assert export_args["request"].output_url_prefix == bucket + + +def test_datastore_export_entity_filter(capsys): + # Test an export with an entity filter + bucket = "gs://my-bucket" + kinds = "Users,Tasks" + namespaceIds = "Customer831,Customer157" + json_string = '{{ "bucket": "{bucket}", "kinds": "{kinds}", "namespaceIds": "{namespaceIds}" }}'.format( + bucket=bucket, kinds=kinds, namespaceIds=namespaceIds + ) + + # Encode data like Cloud Scheduler + data = bytes(json_string, "utf-8") + data_encoded = base64.b64encode(data) + event = {"data": data_encoded} + + # Mock the Datastore service + mockDatastore = Mock() + main.client = mockDatastore + + # Call tested function + main.datastore_export(event, mock_context) + out, err = capsys.readouterr() + export_args = mockDatastore.export_entities.call_args[1] + # Assert request includes test values + + assert export_args["request"].output_url_prefix == bucket + assert export_args["request"].entity_filter.kinds == kinds + assert export_args["request"].entity_filter.namespace_ids == namespaceIds From 18e31b1a31574662f0a13d98471dfb2118732d7f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Aug 2022 21:41:07 -0400 Subject: [PATCH 441/611] chore(deps): update actions/setup-python action to v4 [autoapprove] (#343) * chore(deps): update actions/setup-python action to v4 [autoapprove] Source-Link: https://github.com/googleapis/synthtool/commit/8e55b327bae44b6640c7ab4be91df85fc4d6fe8a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 * resolve issue with prerelease session Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-datastore/.github/workflows/docs.yml | 4 ++-- .../google-cloud-datastore/.github/workflows/lint.yml | 2 +- .../.github/workflows/unittest.yml | 4 ++-- packages/google-cloud-datastore/noxfile.py | 1 + packages/google-cloud-datastore/owlbot.py | 11 +++++++++++ 6 files changed, 19 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 0eb02fda4c09..c701359fc58c 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 -# created: 2022-07-25T16:02:49.174178716Z + digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 +# created: 2022-08-09T15:58:56.463048506Z diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index b46d7305d8cf..7092a139aed3 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml index f512a4960beb..d2aee5b7d8ec 100644 --- a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index 5531b0141297..87ade4d54362 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install coverage diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index f98798778199..bc90eba537f9 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -428,6 +428,7 @@ def prerelease_deps(session): system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + env = {} # Only run system tests if found. if os.path.exists(system_test_path): session.run( diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index e19fe0ad082c..40f95a2206c8 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -137,6 +137,17 @@ def system(session, disable_grpc): """, ) +assert 1 == s.replace( + "noxfile.py", + """\ + # Only run system tests if found. + """, + """\ + env = {} + # Only run system tests if found. 
+ """, +) + assert 2 == s.replace( "noxfile.py", """system_test_path,\n""", From cf49ec37af9fecd2654443a6e6327e21eeea3b42 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Aug 2022 14:32:12 -0400 Subject: [PATCH 442/611] fix(deps): allow protobuf < 5.0.0 (#347) fix(deps): require proto-plus >= 1.22.0 --- packages/google-cloud-datastore/setup.py | 4 ++-- packages/google-cloud-datastore/testing/constraints-3.7.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index b966d7a7b5e8..23131ef30491 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -37,8 +37,8 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.0, <3.0.0dev", - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf >= 3.19.0, <4.0.0dev", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf >= 3.19.0, <5.0.0dev", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index b6789fb79133..e3a84dccc73f 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -7,6 +7,6 @@ # Then this file should have foo==1.14.0 google-api-core==1.31.5 google-cloud-core==1.4.0 -proto-plus==1.15.0 +proto-plus==1.22.0 libcst==0.2.5 protobuf==3.19.0 From 93c79300f52f32978f3beec5c0747e08f4c58dc2 Mon Sep 17 00:00:00 2001 From: Juan Lara Date: Fri, 12 Aug 2022 16:24:29 +0000 Subject: [PATCH 443/611] docs(samples): Add an example of using read_time in queries and get() (#342) * Add an example of using read_time in queries and get() * Fix test for query_with_readtime --- .../samples/snippets/snippets.py | 23 ++++++++++++++++++- .../samples/snippets/snippets_test.py | 6 +++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets.py b/packages/google-cloud-datastore/samples/snippets/snippets.py index 40dd5aad5951..7d2130a8a7b4 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets.py @@ -12,6 +12,7 @@ # limitations under the License. 
import argparse +from datetime import datetime, timedelta, timezone from pprint import pprint from google.cloud import datastore # noqa: I100 @@ -56,11 +57,31 @@ def not_in_query(client): return list(query.fetch()) +def query_with_readtime(client): + # [START datastore_snapshot_read] + # Create a read time of 120 seconds in the past + read_time = datetime.now(timezone.utc) - timedelta(seconds=120) + + # Fetch an entity at time read_time + task_key = client.key('Task', 'sampletask') + entity = client.get(task_key, read_time=read_time) + + # Query Task entities at time read_time + query = client.query(kind="Task") + tasks = query.fetch(read_time=read_time, limit=10) + # [END datastore_snapshot_read] + + results = list(tasks) + results.append(entity) + + return results + + def main(project_id): client = datastore.Client(project_id) for name, function in globals().items(): - if name in ("main", "_preamble", "defaultdict") or not callable(function): + if name in ("main", "_preamble", "defaultdict", "datetime", "timezone", "timedelta") or not callable(function): continue print(name) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets_test.py b/packages/google-cloud-datastore/samples/snippets/snippets_test.py index 27607c07c12a..58e75a599b8b 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets_test.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets_test.py @@ -66,3 +66,9 @@ def test_not_in_query(self, client): tasks = snippets.not_in_query(client) client.entities_to_delete.extend(tasks) assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_query_with_readtime(self, client): + tasks = snippets.query_with_readtime(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None From 7428967ae1c0f0f83431f2e3b17085d5eee36f31 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 16 Aug 2022 14:08:11 -0400 Subject: [PATCH 444/611] chore(main): release 2.8.1 (#345) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 14 ++++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index bd6f4be70c41..632345525ae8 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.8.1](https://github.com/googleapis/python-datastore/compare/v2.8.0...v2.8.1) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#347](https://github.com/googleapis/python-datastore/issues/347)) ([de13860](https://github.com/googleapis/python-datastore/commit/de13860bface16c59a4085ecf9374c61c176629b)) +* **deps:** require proto-plus >= 1.22.0 ([de13860](https://github.com/googleapis/python-datastore/commit/de13860bface16c59a4085ecf9374c61c176629b)) + + +### Documentation + +* Move the schedule_export samples from python-docs-samples ([#344](https://github.com/googleapis/python-datastore/issues/344)) ([2174800](https://github.com/googleapis/python-datastore/commit/21748009b35fbbfb2a2730d792e793472235dfd3)) +* **samples:** Add an example of using read_time in queries and get() ([#342](https://github.com/googleapis/python-datastore/issues/342)) 
([ffc5f17](https://github.com/googleapis/python-datastore/commit/ffc5f176cf106e2180fe5f1475ab24010ff7e1e0)) + ## [2.8.0](https://github.com/googleapis/python-datastore/compare/v2.7.2...v2.8.0) (2022-07-15) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 0a9aecb37574..7e8f51a6d2c4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.8.0" +__version__ = "2.8.1" From 62be3e790c68f9ef00c353bc6f0d52920a6299df Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Aug 2022 16:26:43 +0200 Subject: [PATCH 445/611] chore(deps): update dependency google-cloud-datastore to v2.8.1 (#348) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 4e48d18693f1..7a47161ee8eb 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.0 \ No newline at end of file +google-cloud-datastore==2.8.1 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 560d22f04b4f..3b582d56d05a 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.0 +google-cloud-datastore==2.8.1 From 3962d7cecfbf50d319d409f01a632369f49c2892 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Aug 2022 19:06:47 -0400 Subject: [PATCH 446/611] chore: remove 'pip install' statements from python_library templates [autoapprove] (#351) Source-Link: https://github.com/googleapis/synthtool/commit/69fabaee9eca28af7ecaa02c86895e606fbbebd6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/publish-docs.sh | 4 +- .../google-cloud-datastore/.kokoro/release.sh | 5 +- .../.kokoro/requirements.in | 8 + .../.kokoro/requirements.txt | 464 ++++++++++++++++++ packages/google-cloud-datastore/renovate.json | 2 +- 6 files changed, 477 insertions(+), 10 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/requirements.in create mode 100644 packages/google-cloud-datastore/.kokoro/requirements.txt diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index c701359fc58c..c6acdf3f90c4 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 -# created: 2022-08-09T15:58:56.463048506Z + digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 +# created: 2022-08-24T17:07:22.006876712Z diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index 8acb14e802b0..1c4d62370042 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index fc6f080213d6..9d472b6e595d 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.in b/packages/google-cloud-datastore/.kokoro/requirements.in new file mode 100644 index 000000000000..7718391a34d7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt new file mode 100644 index 000000000000..c4b824f247e3 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -0,0 +1,464 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ 
+ --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + 
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.6.0 \ + --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ + --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.5 \ + --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ + --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv 
+gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.6 \ + --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ + --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + 
--hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.8.2 \ + --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ + --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + 
--hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine 
+platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + 
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.3 \ + --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ + --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + 
--hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json index c21036d385e5..566a70f3cc3c 100644 --- a/packages/google-cloud-datastore/renovate.json +++ b/packages/google-cloud-datastore/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 4163d3866c801a65b31718711385439d3a6d4d52 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 Aug 2022 15:39:18 -0400 Subject: [PATCH 447/611] chore(python): exclude `grpcio==1.49.0rc1` in tests (#353) Source-Link: https://github.com/googleapis/synthtool/commit/c4dd5953003d13b239f872d329c3146586bb417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-datastore/noxfile.py | 7 +++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index c6acdf3f90c4..23e106b65770 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 -# created: 2022-08-24T17:07:22.006876712Z + digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 +# created: 2022-08-29T17:28:30.441852797Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index c4b824f247e3..4b29ef247bed 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -136,9 +136,9 @@ cryptography==37.0.4 \ # via # gcp-releasetool # secretstorage -distlib==0.3.5 \ - --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ - --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e # via virtualenv docutils==0.19 \ --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index bc90eba537f9..80ff3c444f0a 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -200,7 +200,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -398,7 +400,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", From d36b6a51de74e7dd4f83fff4b3875bcd0d111f70 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 15:29:07 -0400 Subject: [PATCH 448/611] ci(python): fix path to requirements.txt in release script (#354) Source-Link: https://github.com/googleapis/synthtool/commit/fdba3ed145bdb2f4f3eff434d4284b1d03b80d34 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 3 +-- .../google-cloud-datastore/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 24 +++++++++---------- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 23e106b65770..0d9eb2af9352 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 -# created: 2022-08-29T17:28:30.441852797Z + digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index 9d472b6e595d..0235f8de618f 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r .kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-datastore/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 4b29ef247bed..92b2f727e777 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -100,9 +100,9 @@ click==8.0.4 \ # via # gcp-docuploader # gcp-releasetool -colorlog==6.6.0 \ - --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ - --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 # via # gcp-docuploader # nox @@ -152,9 +152,9 @@ gcp-docuploader==0.6.3 \ --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b # via -r requirements.in -gcp-releasetool==1.8.6 \ - --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ - --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d # via -r requirements.in google-api-core==2.8.2 \ --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ @@ -251,9 +251,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.8.2 \ - --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ - --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db # via # gcp-releasetool # twine @@ -440,9 +440,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.3 \ - --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ - --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 # via nox webencodings==0.5.1 \ 
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ From c3e0f619e99e145095b6d188f68f29f21a7a6615 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 00:38:35 +0000 Subject: [PATCH 449/611] chore(python): update .kokoro/requirements.txt (#355) Source-Link: https://github.com/googleapis/synthtool/commit/703554a14c7479542335b62fa69279f93a9e38ec Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.kokoro/requirements.txt | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 0d9eb2af9352..2fa0f7c4fe15 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 + digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 92b2f727e777..385f2d4d6106 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -241,6 +241,10 @@ importlib-metadata==4.12.0 \ # via # -r requirements.in # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 @@ -299,6 +303,10 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c From da186a001dbe276cb8e1fe651ce85c0bb85d1c50 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 18:40:18 +0000 Subject: [PATCH 450/611] chore(python): exclude setup.py in renovate config (#358) Source-Link: https://github.com/googleapis/synthtool/commit/56da63e80c384a871356d1ea6640802017f213b4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml 
index 2fa0f7c4fe15..b8dcb4a4af99 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json index 566a70f3cc3c..39b2a0ec9296 100644 --- a/packages/google-cloud-datastore/renovate.json +++ b/packages/google-cloud-datastore/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From ec84e24b54619b7524f1b81c1689adbf6c2ea10b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 17:44:00 +0200 Subject: [PATCH 451/611] chore(deps): update dependency pytest to v7.1.3 (#359) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 9fdbe021770a..90b34de7bb0d 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.1.2; python_version >= "3.7" -pytest==7.1.2 +pytest==7.1.3 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index 6a3d7bca6791..f97bae64aa54 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 \ No newline at end of file +pytest==7.1.3 \ No newline at end of file From cef5ad1f1da907871978b1649bcf5c1ebd81160c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 16:14:24 +0000 Subject: [PATCH 452/611] chore: detect samples tests in nested directories (#360) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/samples/snippets/noxfile.py | 4 ++-- .../samples/snippets/schedule-export/noxfile.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index b8dcb4a4af99..aa547962eb0a 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # 
limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 + digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index 5fcb9d7461f2..0398d72ff690 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -207,8 +207,8 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py index 5fcb9d7461f2..0398d72ff690 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -207,8 +207,8 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From 894015dbc2a93f61e00fc18e9c8baef49e231b12 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 11:34:32 -0400 Subject: [PATCH 453/611] fix(deps): require protobuf >= 3.20.2 (#365) * chore: exclude requirements.txt file from renovate-bot Source-Link: https://github.com/googleapis/synthtool/commit/f58d3135a2fab20e225d98741dbc06d57459b816 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 * update constraints files * fix(deps): require protobuf 3.20.2 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/requirements.txt | 49 +++++++++---------- packages/google-cloud-datastore/setup.py | 2 +- .../testing/constraints-3.6.txt | 12 ----- .../testing/constraints-3.7.txt | 2 +- 5 files changed, 27 insertions(+), 40 deletions(-) delete mode 100644 packages/google-cloud-datastore/testing/constraints-3.6.txt diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index aa547962eb0a..3815c983cb16 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 + digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 385f2d4d6106..d15994bac93c 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -325,31 +325,30 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + 
--hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # gcp-docuploader # gcp-releasetool diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 23131ef30491..4001b3e5dc49 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -38,7 +38,7 @@ # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.2, <5.0.0dev", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/packages/google-cloud-datastore/testing/constraints-3.6.txt b/packages/google-cloud-datastore/testing/constraints-3.6.txt deleted file mode 100644 index b6789fb79133..000000000000 --- a/packages/google-cloud-datastore/testing/constraints-3.6.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. -# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-api-core==1.31.5 -google-cloud-core==1.4.0 -proto-plus==1.15.0 -libcst==0.2.5 -protobuf==3.19.0 diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index e3a84dccc73f..5b0708c7532c 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -9,4 +9,4 @@ google-api-core==1.31.5 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 -protobuf==3.19.0 +protobuf==3.20.2 From 15d626a7d637494a957ce4a9bb97ae8444fe693e Mon Sep 17 00:00:00 2001 From: Juan Lara Date: Mon, 3 Oct 2022 21:32:47 +0000 Subject: [PATCH 454/611] samples: Update the read_time snippet. 
(#363) Co-authored-by: Anthonios Partheniou --- .../samples/snippets/snippets.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets.py b/packages/google-cloud-datastore/samples/snippets/snippets.py index 7d2130a8a7b4..b37020c066b7 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets.py @@ -58,18 +58,18 @@ def not_in_query(client): def query_with_readtime(client): - # [START datastore_snapshot_read] - # Create a read time of 120 seconds in the past - read_time = datetime.now(timezone.utc) - timedelta(seconds=120) + # [START datastore_stale_read] + # Create a read time of 15 seconds in the past + read_time = datetime.now(timezone.utc) - timedelta(seconds=15) - # Fetch an entity at time read_time + # Fetch an entity with read_time task_key = client.key('Task', 'sampletask') entity = client.get(task_key, read_time=read_time) - # Query Task entities at time read_time + # Query Task entities with read_time query = client.query(kind="Task") tasks = query.fetch(read_time=read_time, limit=10) - # [END datastore_snapshot_read] + # [END datastore_stale_read] results = list(tasks) results.append(entity) From 296bc34647b71c5d64b182d1ca249bae1401b7a3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 21:11:31 -0400 Subject: [PATCH 455/611] chore(main): release 2.8.2 (#367) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 632345525ae8..3c02bebce32d 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.8.2](https://github.com/googleapis/python-datastore/compare/v2.8.1...v2.8.2) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#365](https://github.com/googleapis/python-datastore/issues/365)) ([4e3e81f](https://github.com/googleapis/python-datastore/commit/4e3e81fdcee75501ef3b7c9ee7d915a9857c4cce)) + ## [2.8.1](https://github.com/googleapis/python-datastore/compare/v2.8.0...v2.8.1) (2022-08-12) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 7e8f51a6d2c4..839a77a1cf5a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
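The query_with_readtime sample updated above demonstrates Datastore stale reads: both client.get() and query.fetch() accept a read_time in the recent past. A minimal self-contained sketch of the same pattern, assuming default credentials and a hypothetical Task entity:

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from google.cloud import datastore

    client = datastore.Client()  # assumes a default project and credentials

    # Read state as it existed 15 seconds ago.
    read_time = datetime.now(timezone.utc) - timedelta(seconds=15)

    task_key = client.key("Task", "sampletask")  # hypothetical key
    entity = client.get(task_key, read_time=read_time)

    query = client.query(kind="Task")
    tasks = list(query.fetch(read_time=read_time, limit=10))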
-__version__ = "2.8.1" +__version__ = "2.8.2" From 016d56794bc36c47e9df63f6e3c2ae351240af42 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 15:35:09 +0200 Subject: [PATCH 456/611] chore(deps): update dependency google-cloud-datastore to v2.8.2 (#369) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 7a47161ee8eb..2d1b568e6476 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.1 \ No newline at end of file +google-cloud-datastore==2.8.2 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 3b582d56d05a..8a7a99765cb3 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.1 +google-cloud-datastore==2.8.2 From dfc2638216d337e809ee24e99f40d963ebf73a76 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 4 Oct 2022 15:51:06 -0700 Subject: [PATCH 457/611] feat: add datastore aggregation query APIs (#306) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.2 PiperOrigin-RevId: 444333013 Source-Link: https://github.com/googleapis/googleapis/commit/f91b6cf82e929280f6562f6110957c654bd9e2e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/16eb36095c294e712c74a1bf23550817b42174e5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZlYjM2MDk1YzI5NGU3MTJjNzRhMWJmMjM1NTA4MTdiNDIxNzRlNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: Document encoding of non-UTF-8 key strings in the Cloud Datastore API PiperOrigin-RevId: 450535507 Source-Link: https://github.com/googleapis/googleapis/commit/2f003c03e819c5faa3018c3e0e964e9aeef9c833 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7d78e9ed96dbea6bf5e0179c0ee16561007616ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2Q3OGU5ZWQ5NmRiZWE2YmY1ZTAxNzljMGVlMTY1NjEwMDc2MTZhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: use gapic-generator-python 1.0.0 PiperOrigin-RevId: 451250442 Source-Link: https://github.com/googleapis/googleapis/commit/cca5e8181f6442b134e8d4d206fbe9e0e74684ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/0b219da161a8bdcc3c6f7b2efcd82105182a30ca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGIyMTlkYTE2MWE4YmRjYzNjNmY3YjJlZmNkODIxMDUxODJhMzBjYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: clarifications for `DatastoreFirestoreMigraitonMetadata` feat: new enum value `OPTIMISTIC_WITH_ENTITY_GROUPS` for `ConcurrencyMode` PiperOrigin-RevId: 452220654 Source-Link: 
https://github.com/googleapis/googleapis/commit/454ad5ec43997b2b2fff15b2c0d86677fecdc60e Source-Link: https://github.com/googleapis/googleapis-gen/commit/45e72318c728b40c2131df1b5650965b68eb3bed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDVlNzIzMThjNzI4YjQwYzIxMzFkZjFiNTY1MDk2NWI2OGViM2JlZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add audience parameter PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: use gapic-generator-python 1.1.1 PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): require google-api-core 2.8.0 * fix(deps): require google-api-core>=1.32.0,>=2.8.0 * feat: Add "or" query support (only in the preview API surface) docs: Clarify wording around default database_id usage PiperOrigin-RevId: 462444231 Source-Link: https://github.com/googleapis/googleapis/commit/b9cbfce1714be863ce3e405fbc6b409105298206 Source-Link: https://github.com/googleapis/googleapis-gen/commit/934e426ab421006f20454568821b61bb30979b10 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTM0ZTQyNmFiNDIxMDA2ZjIwNDU0NTY4ODIxYjYxYmIzMDk3OWIxMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore(bazel): update protobuf to v3.21.3 chore(bazel): update gax-java to 2.18.4 PiperOrigin-RevId: 463115700 Source-Link: https://github.com/googleapis/googleapis/commit/52130a9c3c289e6bc4ab1784bdde6081abdf3dd9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6a4d9d9bb3afb20b0f5fa4f5d9f6740b1d0eb19a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmE0ZDlkOWJiM2FmYjIwYjBmNWZhNGY1ZDlmNjc0MGIxZDBlYjE5YSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: use gapic-generator-python 1.2.0 PiperOrigin-RevId: 467286830 Source-Link: https://github.com/googleapis/googleapis/commit/e6e875a456c046e94eeb5a76211daa046a8e72c9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0295ea14d9cd4d47ddb23b9ebd39a31e2035e28f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDI5NWVhMTRkOWNkNGQ0N2RkYjIzYjllYmQzOWEzMWUyMDM1ZTI4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update count up_to field type in aggregation queries PiperOrigin-RevId: 469555077 Source-Link: https://github.com/googleapis/googleapis/commit/7c3a38d624bcb786a7258ab3878cbe5517ade414 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cb40e6e892a9c33da0b2e6a6335d5f1e45a870da Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2I0MGU2ZTg5MmE5YzMzZGEwYjJlNmE2MzM1ZDVmMWU0NWE4NzBkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Bump gapic-generator-python version to 1.3.0 PiperOrigin-RevId: 472561635 Source-Link: https://github.com/googleapis/googleapis/commit/332ecf599f8e747d8d1213b77ae7db26eff12814 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4313d682880fd9d7247291164d4e9d3d5bd9f177 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDMxM2Q2ODI4ODBmZDlkNzI0NzI5MTE2NGQ0ZTlkM2Q1YmQ5ZjE3NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: use gapic-generator-python 1.3.1 PiperOrigin-RevId: 472772457 Source-Link: https://github.com/googleapis/googleapis/commit/855b74d203deeb0f7a0215f9454cdde62a1f9b86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b64b1e7da3e138f15ca361552ef0545e54891b4f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjY0YjFlN2RhM2UxMzhmMTVjYTM2MTU1MmVmMDU0NWU1NDg5MWI0ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: integrate gapic-generator-python-1.4.1 and enable more py_test targets PiperOrigin-RevId: 473833416 Source-Link: https://github.com/googleapis/googleapis/commit/565a5508869557a3228b871101e4e4ebd8f93d11 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ee1a06c6de3ca8b843572c1fde0548f84236989 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWVlMWEwNmM2ZGUzY2E4Yjg0MzU3MmMxZmRlMDU0OGY4NDIzNjk4OSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add datastore aggregation query APIs PiperOrigin-RevId: 477890345 Source-Link: https://github.com/googleapis/googleapis/commit/82bf6749a84d54a098092ae3a0ec135475b18c31 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5fb8115161ea14a5c551885c681c3627c2f66630 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWZiODExNTE2MWVhMTRhNWM1NTE4ODVjNjgxYzM2MjdjMmY2NjYzMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove the tests directory from noxfile mypy session * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update owlbot for mypy session. 
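The aggregation-query surface referenced in the bullet above follows the standard GAPIC shape; the generated samples later in this patch reduce to roughly the following sketch, where the project id is a placeholder and a useful request would also carry an aggregation_query:

.. code-block:: python

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()

    # "my-project" is a placeholder; see RunAggregationQueryRequest for the
    # full set of fields (partition_id, read_options, aggregation_query, ...).
    request = datastore_v1.RunAggregationQueryRequest(
        project_id="my-project",
    )

    response = client.run_aggregation_query(request=request)
    print(response)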
* 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Mariatta Wijaya Co-authored-by: Mariatta Wijaya --- .../services/datastore_admin/async_client.py | 135 +- .../services/datastore_admin/client.py | 100 +- .../datastore_admin/transports/base.py | 16 +- .../datastore_admin/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../types/datastore_admin.py | 2 +- .../datastore_admin_v1/types/migration.py | 1 + .../google/cloud/datastore_v1/__init__.py | 10 + .../cloud/datastore_v1/gapic_metadata.json | 10 + .../services/datastore/async_client.py | 458 +++++- .../datastore_v1/services/datastore/client.py | 408 ++++++ .../services/datastore/transports/base.py | 77 +- .../services/datastore/transports/grpc.py | 101 ++ .../datastore/transports/grpc_asyncio.py | 102 ++ .../cloud/datastore_v1/types/__init__.py | 12 + .../datastore_v1/types/aggregation_result.py | 96 ++ .../cloud/datastore_v1/types/datastore.py | 166 ++- .../google/cloud/datastore_v1/types/entity.py | 31 +- .../google/cloud/datastore_v1/types/query.py | 137 +- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/owlbot.py | 2 +- .../scripts/fixup_datastore_v1_keywords.py | 13 +- packages/google-cloud-datastore/setup.py | 8 +- .../testing/constraints-3.7.txt | 2 +- .../test_datastore_admin.py | 435 +++++- .../unit/gapic/datastore_v1/test_datastore.py | 1254 ++++++++++++++++- 26 files changed, 3477 insertions(+), 105 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 0f6be699659e..2a7e46c48516 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -287,11 +287,18 @@ async def export_entities( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 - def sample_export_entities(): + async def sample_export_entities(): # Create a client - client = datastore_admin_v1.DatastoreAdminClient() + client = datastore_admin_v1.DatastoreAdminAsyncClient() # Initialize request argument(s) request = datastore_admin_v1.ExportEntitiesRequest( @@ -304,7 +311,7 @@ def sample_export_entities(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -409,6 +416,14 @@ def sample_export_entities(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. 
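Each "Certain fields should be provided within the metadata header" block added in this patch builds an x-goog-request-params routing header from request fields. In isolation, the helper behaves roughly like this sketch (placeholder project id):

.. code-block:: python

    from google.api_core import gapic_v1

    # to_grpc_metadata returns a single ("x-goog-request-params", value) pair,
    # which the generated clients append to the outgoing metadata tuple.
    header = gapic_v1.routing_header.to_grpc_metadata(
        (("project_id", "my-project"),)
    )
    print(header)  # ('x-goog-request-params', 'project_id=my-project')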
response = await rpc( request, @@ -450,11 +465,18 @@ async def import_entities( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 - def sample_import_entities(): + async def sample_import_entities(): # Create a client - client = datastore_admin_v1.DatastoreAdminClient() + client = datastore_admin_v1.DatastoreAdminAsyncClient() # Initialize request argument(s) request = datastore_admin_v1.ImportEntitiesRequest( @@ -467,7 +489,7 @@ def sample_import_entities(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -540,9 +562,6 @@ def sample_import_entities(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -576,6 +595,14 @@ def sample_import_entities(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -621,11 +648,18 @@ async def create_index( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 - def sample_create_index(): + async def sample_create_index(): # Create a client - client = datastore_admin_v1.DatastoreAdminClient() + client = datastore_admin_v1.DatastoreAdminAsyncClient() # Initialize request argument(s) request = datastore_admin_v1.CreateIndexRequest( @@ -636,7 +670,7 @@ def sample_create_index(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -671,6 +705,14 @@ def sample_create_index(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -715,11 +757,18 @@ async def delete_index( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 - def sample_delete_index(): + async def sample_delete_index(): # Create a client - client = datastore_admin_v1.DatastoreAdminClient() + client = datastore_admin_v1.DatastoreAdminAsyncClient() # Initialize request argument(s) request = datastore_admin_v1.DeleteIndexRequest( @@ -730,7 +779,7 @@ def sample_delete_index(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -765,6 +814,17 @@ def sample_delete_index(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("index_id", request.index_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -796,18 +856,25 @@ async def get_index( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 - def sample_get_index(): + async def sample_get_index(): # Create a client - client = datastore_admin_v1.DatastoreAdminClient() + client = datastore_admin_v1.DatastoreAdminAsyncClient() # Initialize request argument(s) request = datastore_admin_v1.GetIndexRequest( ) # Make the request - response = client.get_index(request=request) + response = await client.get_index(request=request) # Handle the response print(response) @@ -847,6 +914,17 @@ def sample_get_index(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("index_id", request.index_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -873,11 +951,18 @@ async def list_indexes( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 - def sample_list_indexes(): + async def sample_list_indexes(): # Create a client - client = datastore_admin_v1.DatastoreAdminClient() + client = datastore_admin_v1.DatastoreAdminAsyncClient() # Initialize request argument(s) request = datastore_admin_v1.ListIndexesRequest( @@ -887,7 +972,7 @@ def sample_list_indexes(): page_result = client.list_indexes(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -930,6 +1015,14 @@ def sample_list_indexes(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 8f5364a74505..201a69e8ebb8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -461,6 +461,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def export_entities( @@ -488,6 +489,13 @@ def export_entities( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 def sample_export_entities(): @@ -609,6 +617,14 @@ def sample_export_entities(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_entities] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -650,6 +666,13 @@ def import_entities( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 def sample_import_entities(): @@ -740,9 +763,6 @@ def sample_import_entities(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -775,6 +795,14 @@ def sample_import_entities(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.import_entities] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -820,6 +848,13 @@ def create_index( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 def sample_create_index(): @@ -871,6 +906,14 @@ def sample_create_index(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_index] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -915,6 +958,13 @@ def delete_index( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 def sample_delete_index(): @@ -966,6 +1016,17 @@ def sample_delete_index(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_index] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("index_id", request.index_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -997,6 +1058,13 @@ def get_index( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 def sample_get_index(): @@ -1039,6 +1107,17 @@ def sample_get_index(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_index] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("index_id", request.index_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1065,6 +1144,13 @@ def list_indexes( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_admin_v1 def sample_list_indexes(): @@ -1113,6 +1199,14 @@ def sample_list_indexes(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_indexes] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 618a990c4312..cf9fd9506b81 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -87,11 +88,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -112,6 +108,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -124,6 +125,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index e41933662adc..f15228fab95a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -118,6 +118,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
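The api_audience plumbing added to these transports is surfaced through client options; when the credentials support with_gdch_audience, the transport applies the configured audience and otherwise falls back to the host. A sketch of setting it from application code, with a hypothetical audience value:

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.cloud import datastore_admin_v1

    # Hypothetical audience; by default the transport falls back to the
    # service host when api_audience is not set.
    options = ClientOptions(api_audience="https://datastore.googleapis.com")
    client = datastore_admin_v1.DatastoreAdminClient(client_options=options)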
@@ -214,6 +215,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 46f848877361..b5ec28142eae 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -163,6 +163,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -259,6 +260,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 82bacec17632..1bffdf3da635 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -570,7 +570,7 @@ class DatastoreFirestoreMigrationMetadata(proto.Message): deviation from the LRO design pattern. This singleton resource can be accessed at: - ``projects/{project_id}/datastore-firestore-migration`` + "projects/{project_id}/operations/datastore-firestore-migration" Attributes: migration_state (google.cloud.datastore_admin_v1.types.MigrationState): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index 97d4145f665d..cbd4f4f71fae 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -98,6 +98,7 @@ class ConcurrencyMode(proto.Enum): CONCURRENCY_MODE_UNSPECIFIED = 0 PESSIMISTIC = 1 OPTIMISTIC = 2 + OPTIMISTIC_WITH_ENTITY_GROUPS = 3 class PrepareStepDetails(proto.Message): r"""Details for the ``PREPARE`` step. 
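The datastore_v1 exports added below include the new aggregation types. A sketch of building a COUNT aggregation over a hypothetical Task kind, assuming the message layout introduced by this patch:

.. code-block:: python

    from google.cloud import datastore_v1

    # Count all Task entities, exposing the result under the alias "total".
    aggregation_query = datastore_v1.AggregationQuery(
        nested_query=datastore_v1.Query(
            kind=[datastore_v1.KindExpression(name="Task")],
        ),
        aggregations=[
            datastore_v1.AggregationQuery.Aggregation(
                count=datastore_v1.AggregationQuery.Aggregation.Count(),
                alias="total",
            )
        ],
    )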
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 881df4caf73e..83e7464d0960 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -17,6 +17,8 @@ from .services.datastore import DatastoreClient from .services.datastore import DatastoreAsyncClient +from .types.aggregation_result import AggregationResult +from .types.aggregation_result import AggregationResultBatch from .types.datastore import AllocateIdsRequest from .types.datastore import AllocateIdsResponse from .types.datastore import BeginTransactionRequest @@ -32,6 +34,8 @@ from .types.datastore import ReserveIdsResponse from .types.datastore import RollbackRequest from .types.datastore import RollbackResponse +from .types.datastore import RunAggregationQueryRequest +from .types.datastore import RunAggregationQueryResponse from .types.datastore import RunQueryRequest from .types.datastore import RunQueryResponse from .types.datastore import TransactionOptions @@ -40,6 +44,7 @@ from .types.entity import Key from .types.entity import PartitionId from .types.entity import Value +from .types.query import AggregationQuery from .types.query import CompositeFilter from .types.query import EntityResult from .types.query import Filter @@ -55,6 +60,9 @@ __all__ = ( "DatastoreAsyncClient", + "AggregationQuery", + "AggregationResult", + "AggregationResultBatch", "AllocateIdsRequest", "AllocateIdsResponse", "ArrayValue", @@ -87,6 +95,8 @@ "ReserveIdsResponse", "RollbackRequest", "RollbackResponse", + "RunAggregationQueryRequest", + "RunAggregationQueryResponse", "RunQueryRequest", "RunQueryResponse", "TransactionOptions", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json index 5da47e53b941..ddde2b762f17 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json @@ -40,6 +40,11 @@ "rollback" ] }, + "RunAggregationQuery": { + "methods": [ + "run_aggregation_query" + ] + }, "RunQuery": { "methods": [ "run_query" @@ -80,6 +85,11 @@ "rollback" ] }, + "RunAggregationQuery": { + "methods": [ + "run_aggregation_query" + ] + }, "RunQuery": { "methods": [ "run_query" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index ab4d60cc5ef6..c6e6f2dff143 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -31,9 +31,11 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.longrunning import operations_pb2 from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport @@ -218,11 +220,18 @@ async def lookup( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_lookup(): + async def sample_lookup(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.LookupRequest( @@ -230,7 +239,7 @@ def sample_lookup(): ) # Make the request - response = client.lookup(request=request) + response = await client.lookup(request=request) # Handle the response print(response) @@ -309,6 +318,14 @@ def sample_lookup(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -332,11 +349,18 @@ async def run_query( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_run_query(): + async def sample_run_query(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.RunQueryRequest( @@ -344,7 +368,7 @@ def sample_run_query(): ) # Make the request - response = client.run_query(request=request) + response = await client.run_query(request=request) # Handle the response print(response) @@ -386,6 +410,106 @@ def sample_run_query(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_aggregation_query( + self, + request: Union[datastore.RunAggregationQueryRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RunAggregationQueryResponse: + r"""Runs an aggregation query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datastore_v1 + + async def sample_run_aggregation_query(): + # Create a client + client = datastore_v1.DatastoreAsyncClient() + + # Initialize request argument(s) + request = datastore_v1.RunAggregationQueryRequest( + project_id="project_id_value", + ) + + # Make the request + response = await client.run_aggregation_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]): + The request object. The request for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datastore_v1.types.RunAggregationQueryResponse: + The response for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + + """ + # Create or coerce a protobuf request object. + request = datastore.RunAggregationQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_aggregation_query, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -410,11 +534,18 @@ async def begin_transaction( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_begin_transaction(): + async def sample_begin_transaction(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.BeginTransactionRequest( @@ -422,7 +553,7 @@ def sample_begin_transaction(): ) # Make the request - response = client.begin_transaction(request=request) + response = await client.begin_transaction(request=request) # Handle the response print(response) @@ -475,6 +606,14 @@ def sample_begin_transaction(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -503,11 +642,18 @@ async def commit( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_commit(): + async def sample_commit(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.CommitRequest( @@ -516,7 +662,7 @@ def sample_commit(): ) # Make the request - response = client.commit(request=request) + response = await client.commit(request=request) # Handle the response print(response) @@ -610,6 +756,14 @@ def sample_commit(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -635,11 +789,18 @@ async def rollback( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_rollback(): + async def sample_rollback(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.RollbackRequest( @@ -648,7 +809,7 @@ def sample_rollback(): ) # Make the request - response = client.rollback(request=request) + response = await client.rollback(request=request) # Handle the response print(response) @@ -711,6 +872,14 @@ def sample_rollback(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -737,11 +906,18 @@ async def allocate_ids( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_allocate_ids(): + async def sample_allocate_ids(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.AllocateIdsRequest( @@ -749,7 +925,7 @@ def sample_allocate_ids(): ) # Make the request - response = client.allocate_ids(request=request) + response = await client.allocate_ids(request=request) # Handle the response print(response) @@ -813,6 +989,14 @@ def sample_allocate_ids(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -839,11 +1023,18 @@ async def reserve_ids( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 - def sample_reserve_ids(): + async def sample_reserve_ids(): # Create a client - client = datastore_v1.DatastoreClient() + client = datastore_v1.DatastoreAsyncClient() # Initialize request argument(s) request = datastore_v1.ReserveIdsRequest( @@ -851,7 +1042,7 @@ def sample_reserve_ids(): ) # Make the request - response = client.reserve_ids(request=request) + response = await client.reserve_ids(request=request) # Handle the response print(response) @@ -924,6 +1115,14 @@ def sample_reserve_ids(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = await rpc( request, @@ -935,6 +1134,223 @@ def sample_reserve_ids(): # Done; return the response. return response + async def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
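The list/get/delete/cancel operation methods added here expose the standard google.longrunning mixin on the async client. A hypothetical call, assuming an existing operation name:

.. code-block:: python

    from google.longrunning import operations_pb2

    from google.cloud import datastore_v1

    async def check(client: datastore_v1.DatastoreAsyncClient) -> None:
        # "projects/my-project/operations/exampleop" is a placeholder name.
        op = await client.get_operation(
            operations_pb2.GetOperationRequest(
                name="projects/my-project/operations/exampleop"
            )
        )
        print(op.done)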
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self): return self diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 5b012a2f31f4..4d0b8f3471a3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -34,9 +34,11 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.longrunning import operations_pb2 from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreGrpcTransport @@ -409,6 +411,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def lookup( @@ -426,6 +429,13 @@ def lookup( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_lookup(): @@ -507,6 +517,14 @@ def sample_lookup(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.lookup] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -530,6 +548,13 @@ def run_query( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_run_query(): @@ -575,6 +600,97 @@ def sample_run_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.run_query] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_aggregation_query( + self, + request: Union[datastore.RunAggregationQueryRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RunAggregationQueryResponse: + r"""Runs an aggregation query. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datastore_v1 + + def sample_run_aggregation_query(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RunAggregationQueryRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.run_aggregation_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]): + The request object. The request for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datastore_v1.types.RunAggregationQueryResponse: + The response for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datastore.RunAggregationQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datastore.RunAggregationQueryRequest): + request = datastore.RunAggregationQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_aggregation_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -599,6 +715,13 @@ def begin_transaction( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_begin_transaction(): @@ -664,6 +787,14 @@ def sample_begin_transaction(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.begin_transaction] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -692,6 +823,13 @@ def commit( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_commit(): @@ -799,6 +937,14 @@ def sample_commit(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.commit] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -824,6 +970,13 @@ def rollback( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_rollback(): @@ -900,6 +1053,14 @@ def sample_rollback(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.rollback] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -926,6 +1087,13 @@ def allocate_ids( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_allocate_ids(): @@ -1002,6 +1170,14 @@ def sample_allocate_ids(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.allocate_ids] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. response = rpc( request, @@ -1028,6 +1204,13 @@ def reserve_ids( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import datastore_v1 def sample_reserve_ids(): @@ -1103,6 +1286,14 @@ def sample_reserve_ids(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.reserve_ids] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("project_id", request.project_id),) + ), + ) + # Send the request. 
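         # (Hypothetical example of the header added above:
         #  ("x-goog-request-params", "project_id=my-project").)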
response = rpc( request, @@ -1127,6 +1318,223 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
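+        # (Once Operation.done is True, exactly one of
+        #  Operation.response or Operation.error is set.)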
+ return response + + def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
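+        # (No response is returned; success only means the
+        #  cancellation request was accepted, not that it took effect.)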
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 22a4c167dd2e..d628468bb158 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -26,6 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.datastore_v1.types import datastore +from google.longrunning import operations_pb2 try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -57,6 +58,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -84,11 +86,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -109,6 +106,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -121,6 +123,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
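         # (Each entry pairs a transport method with its default retry
         #  and timeout; per-call arguments override these defaults.)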
self._wrapped_methods = { @@ -154,6 +161,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.run_aggregation_query: gapic_v1.method.wrap_method( + self.run_aggregation_query, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.begin_transaction: gapic_v1.method.wrap_method( self.begin_transaction, default_timeout=60.0, @@ -218,6 +240,18 @@ def run_query( ]: raise NotImplementedError() + @property + def run_aggregation_query( + self, + ) -> Callable[ + [datastore.RunAggregationQueryRequest], + Union[ + datastore.RunAggregationQueryResponse, + Awaitable[datastore.RunAggregationQueryResponse], + ], + ]: + raise NotImplementedError() + @property def begin_transaction( self, @@ -266,6 +300,39 @@ def reserve_ids( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 16938b68a459..4f160c453307 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.datastore_v1.types import datastore +from google.longrunning import operations_pb2 from .base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -64,6 +65,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -159,6 +161,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -283,6 +286,34 @@ def run_query( ) return self._stubs["run_query"] + @property + def run_aggregation_query( + self, + ) -> Callable[ + [datastore.RunAggregationQueryRequest], datastore.RunAggregationQueryResponse + ]: + r"""Return a callable for the run aggregation query method over gRPC. + + Runs an aggregation query. + + Returns: + Callable[[~.RunAggregationQueryRequest], + ~.RunAggregationQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_aggregation_query" not in self._stubs: + self._stubs["run_aggregation_query"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/RunAggregationQuery", + request_serializer=datastore.RunAggregationQueryRequest.serialize, + response_deserializer=datastore.RunAggregationQueryResponse.deserialize, + ) + return self._stubs["run_aggregation_query"] + @property def begin_transaction( self, @@ -419,6 +450,76 @@ def reserve_ids( def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
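+        # (Stubs are cached in self._stubs, so each RPC stub is
+        #  created at most once per channel.)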
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index f539e84f33c9..529c22062084 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.datastore_v1.types import datastore +from google.longrunning import operations_pb2 from .base import DatastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreGrpcTransport @@ -109,6 +110,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -204,6 +206,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -288,6 +291,35 @@ def run_query( ) return self._stubs["run_query"] + @property + def run_aggregation_query( + self, + ) -> Callable[ + [datastore.RunAggregationQueryRequest], + Awaitable[datastore.RunAggregationQueryResponse], + ]: + r"""Return a callable for the run aggregation query method over gRPC. + + Runs an aggregation query. + + Returns: + Callable[[~.RunAggregationQueryRequest], + Awaitable[~.RunAggregationQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_aggregation_query" not in self._stubs: + self._stubs["run_aggregation_query"] = self.grpc_channel.unary_unary( + "/google.datastore.v1.Datastore/RunAggregationQuery", + request_serializer=datastore.RunAggregationQueryRequest.serialize, + response_deserializer=datastore.RunAggregationQueryResponse.deserialize, + ) + return self._stubs["run_aggregation_query"] + @property def begin_transaction( self, @@ -431,5 +463,75 @@ def reserve_ids( def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
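+        # (response_deserializer is None below because DeleteOperation
+        #  has no meaningful response payload.)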
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("DatastoreGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index eb4fc8c2a755..ca7dae214ce3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .aggregation_result import ( + AggregationResult, + AggregationResultBatch, +) from .datastore import ( AllocateIdsRequest, AllocateIdsResponse, @@ -29,6 +33,8 @@ ReserveIdsResponse, RollbackRequest, RollbackResponse, + RunAggregationQueryRequest, + RunAggregationQueryResponse, RunQueryRequest, RunQueryResponse, TransactionOptions, @@ -41,6 +47,7 @@ Value, ) from .query import ( + AggregationQuery, CompositeFilter, EntityResult, Filter, @@ -56,6 +63,8 @@ ) __all__ = ( + "AggregationResult", + "AggregationResultBatch", "AllocateIdsRequest", "AllocateIdsResponse", "BeginTransactionRequest", @@ -71,6 +80,8 @@ "ReserveIdsResponse", "RollbackRequest", "RollbackResponse", + "RunAggregationQueryRequest", + "RunAggregationQueryResponse", "RunQueryRequest", "RunQueryResponse", "TransactionOptions", @@ -79,6 +90,7 @@ "Key", "PartitionId", "Value", + "AggregationQuery", "CompositeFilter", "EntityResult", "Filter", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py new file mode 100644 index 000000000000..17020a63ccd7 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.datastore_v1.types import entity +from google.cloud.datastore_v1.types import query +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "AggregationResult", + "AggregationResultBatch", + }, +) + + +class AggregationResult(proto.Message): + r"""The result of a single bucket from a Datastore aggregation query. + + The keys of ``aggregate_properties`` are the same for all results in + an aggregation query, unlike entity queries which can have different + fields present for each result. + + Attributes: + aggregate_properties (Mapping[str, google.cloud.datastore_v1.types.Value]): + The result of the aggregation functions, ex: + ``COUNT(*) AS total_entities``. + + The key is the + [alias][google.datastore.v1.AggregationQuery.Aggregation.alias] + assigned to the aggregation function on input and the size + of this map equals the number of aggregation functions in + the query. + """ + + aggregate_properties = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message=entity.Value, + ) + + +class AggregationResultBatch(proto.Message): + r"""A batch of aggregation results produced by an aggregation + query. + + Attributes: + aggregation_results (Sequence[google.cloud.datastore_v1.types.AggregationResult]): + The aggregation results for this batch. + more_results (google.cloud.datastore_v1.types.QueryResultBatch.MoreResultsType): + The state of the query after the current batch. Only + COUNT(*) aggregations are supported in the initial launch. 
+ Therefore, expected result type is limited to + ``NO_MORE_RESULTS``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Read timestamp this batch was returned from. + In a single transaction, subsequent query result + batches for the same query can have a greater + timestamp. Each batch's read timestamp is valid + for all preceding batches. + """ + + aggregation_results = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AggregationResult", + ) + more_results = proto.Field( + proto.ENUM, + number=2, + enum=query.QueryResultBatch.MoreResultsType, + ) + read_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index f4907298ca5e..42c87f72c0a3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -15,6 +15,7 @@ # import proto # type: ignore +from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query from google.protobuf import timestamp_pb2 # type: ignore @@ -27,6 +28,8 @@ "LookupResponse", "RunQueryRequest", "RunQueryResponse", + "RunAggregationQueryRequest", + "RunAggregationQueryResponse", "BeginTransactionRequest", "BeginTransactionResponse", "RollbackRequest", @@ -53,6 +56,11 @@ class LookupRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. read_options (google.cloud.datastore_v1.types.ReadOptions): The options for this lookup request. keys (Sequence[google.cloud.datastore_v1.types.Key]): @@ -63,6 +71,10 @@ class LookupRequest(proto.Message): proto.STRING, number=8, ) + database_id = proto.Field( + proto.STRING, + number=9, + ) read_options = proto.Field( proto.MESSAGE, number=1, @@ -135,6 +147,11 @@ class RunQueryRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. partition_id (google.cloud.datastore_v1.types.PartitionId): Entities are partitioned into subsets, identified by a partition ID. Queries are scoped @@ -148,7 +165,8 @@ class RunQueryRequest(proto.Message): This field is a member of `oneof`_ ``query_type``. gql_query (google.cloud.datastore_v1.types.GqlQuery): - The GQL query to run. + The GQL query to run. This query must be a + non-aggregation query. This field is a member of `oneof`_ ``query_type``. """ @@ -157,6 +175,10 @@ class RunQueryRequest(proto.Message): proto.STRING, number=8, ) + database_id = proto.Field( + proto.STRING, + number=9, + ) partition_id = proto.Field( proto.MESSAGE, number=2, @@ -205,6 +227,102 @@ class RunQueryResponse(proto.Message): ) +class RunAggregationQueryRequest(proto.Message): + r"""The request for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + Required. The ID of the project against which + to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. + partition_id (google.cloud.datastore_v1.types.PartitionId): + Entities are partitioned into subsets, + identified by a partition ID. Queries are scoped + to a single partition. This partition ID is + normalized with the standard default context + partition ID. + read_options (google.cloud.datastore_v1.types.ReadOptions): + The options for this query. + aggregation_query (google.cloud.datastore_v1.types.AggregationQuery): + The query to run. + + This field is a member of `oneof`_ ``query_type``. + gql_query (google.cloud.datastore_v1.types.GqlQuery): + The GQL query to run. This query must be an + aggregation query. + + This field is a member of `oneof`_ ``query_type``. + """ + + project_id = proto.Field( + proto.STRING, + number=8, + ) + database_id = proto.Field( + proto.STRING, + number=9, + ) + partition_id = proto.Field( + proto.MESSAGE, + number=2, + message=entity.PartitionId, + ) + read_options = proto.Field( + proto.MESSAGE, + number=1, + message="ReadOptions", + ) + aggregation_query = proto.Field( + proto.MESSAGE, + number=3, + oneof="query_type", + message=gd_query.AggregationQuery, + ) + gql_query = proto.Field( + proto.MESSAGE, + number=7, + oneof="query_type", + message=gd_query.GqlQuery, + ) + + +class RunAggregationQueryResponse(proto.Message): + r"""The response for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + + Attributes: + batch (google.cloud.datastore_v1.types.AggregationResultBatch): + A batch of aggregation results. Always + present. + query (google.cloud.datastore_v1.types.AggregationQuery): + The parsed form of the ``GqlQuery`` from the request, if it + was set. + """ + + batch = proto.Field( + proto.MESSAGE, + number=1, + message=aggregation_result.AggregationResultBatch, + ) + query = proto.Field( + proto.MESSAGE, + number=2, + message=gd_query.AggregationQuery, + ) + + class BeginTransactionRequest(proto.Message): r"""The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. @@ -213,6 +331,11 @@ class BeginTransactionRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. transaction_options (google.cloud.datastore_v1.types.TransactionOptions): Options for a new transaction. """ @@ -221,6 +344,10 @@ class BeginTransactionRequest(proto.Message): proto.STRING, number=8, ) + database_id = proto.Field( + proto.STRING, + number=9, + ) transaction_options = proto.Field( proto.MESSAGE, number=10, @@ -251,6 +378,11 @@ class RollbackRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. transaction (bytes): Required. 
The transaction identifier, returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. @@ -260,6 +392,10 @@ class RollbackRequest(proto.Message): proto.STRING, number=8, ) + database_id = proto.Field( + proto.STRING, + number=9, + ) transaction = proto.Field( proto.BYTES, number=1, @@ -285,6 +421,11 @@ class CommitRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. mode (google.cloud.datastore_v1.types.CommitRequest.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. @@ -321,6 +462,10 @@ class Mode(proto.Enum): proto.STRING, number=8, ) + database_id = proto.Field( + proto.STRING, + number=9, + ) mode = proto.Field( proto.ENUM, number=5, @@ -379,6 +524,11 @@ class AllocateIdsRequest(proto.Message): project_id (str): Required. The ID of the project against which to make the request. + database_id (str): + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be @@ -389,6 +539,10 @@ class AllocateIdsRequest(proto.Message): proto.STRING, number=8, ) + database_id = proto.Field( + proto.STRING, + number=9, + ) keys = proto.RepeatedField( proto.MESSAGE, number=1, @@ -423,8 +577,10 @@ class ReserveIdsRequest(proto.Message): Required. The ID of the project against which to make the request. database_id (str): - If not empty, the ID of the database against - which to make the request. + The ID of the database against which to make + the request. + '(default)' is not allowed; please use empty + string '' to refer the default database. keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key paths whose numeric IDs should not be @@ -602,8 +758,8 @@ class ReadOptions(proto.Message): Attributes: read_consistency (google.cloud.datastore_v1.types.ReadOptions.ReadConsistency): - The non-transactional read consistency to use. Cannot be set - to ``STRONG`` for global queries. + The non-transactional read consistency to + use. This field is a member of `oneof`_ ``consistency_type``. transaction (bytes): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index e949a56a95b0..d9b8febe6d95 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -57,6 +57,9 @@ class PartitionId(proto.Message): project_id (str): The ID of the project to which the entities belong. + database_id (str): + If not empty, the ID of the database to which + the entities belong. namespace_id (str): If not empty, the ID of the namespace to which the entities belong. @@ -66,6 +69,10 @@ class PartitionId(proto.Message): proto.STRING, number=2, ) + database_id = proto.Field( + proto.STRING, + number=3, + ) namespace_id = proto.Field( proto.STRING, number=4, @@ -121,9 +128,15 @@ class PathElement(proto.Message): Attributes: kind (str): - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. 
A kind must not contain more than 1500
-            bytes when UTF-8 encoded. Cannot be ``""``.
+            The kind of the entity.
+
+            A kind matching regex ``__.*__`` is reserved/read-only. A
+            kind must not contain more than 1500 bytes when UTF-8
+            encoded. Cannot be ``""``.
+
+            Must be valid UTF-8 bytes. Legacy values that are not valid
+            UTF-8 are encoded as ``__bytes<X>__`` where ``<X>`` is the
+            base-64 encoding of the bytes.
         id (int):
             The auto-allocated ID of the entity. Never
             equal to zero. Values less than zero are
@@ -132,9 +145,15 @@ class PathElement(proto.Message):
 
             This field is a member of `oneof`_ ``id_type``.
         name (str):
-            The name of the entity. A name matching regex ``__.*__`` is
-            reserved/read-only. A name must not be more than 1500 bytes
-            when UTF-8 encoded. Cannot be ``""``.
+            The name of the entity.
+
+            A name matching regex ``__.*__`` is reserved/read-only. A
+            name must not be more than 1500 bytes when UTF-8 encoded.
+            Cannot be ``""``.
+
+            Must be valid UTF-8 bytes. Legacy values that are not valid
+            UTF-8 are encoded as ``__bytes<X>__`` where ``<X>`` is the
+            base-64 encoding of the bytes.
 
             This field is a member of `oneof`_ ``id_type``.
     """
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py
index 1179efcef561..a3f6b75d46a6 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py
@@ -25,6 +25,7 @@
     manifest={
         "EntityResult",
         "Query",
+        "AggregationQuery",
         "KindExpression",
         "PropertyReference",
         "Projection",
@@ -188,6 +189,137 @@ class Query(proto.Message):
     )
 
 
+class AggregationQuery(proto.Message):
+    r"""Datastore query for running an aggregation over a
+    [Query][google.datastore.v1.Query].
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        nested_query (google.cloud.datastore_v1.types.Query):
+            Nested query for aggregation
+
+            This field is a member of `oneof`_ ``query_type``.
+        aggregations (Sequence[google.cloud.datastore_v1.types.AggregationQuery.Aggregation]):
+            Optional. Series of aggregations to apply over the results
+            of the ``nested_query``.
+
+            Requires:
+
+            -  A minimum of one and maximum of five aggregations per
+               query.
+    """
+
+    class Aggregation(proto.Message):
+        r"""Defines an aggregation that produces a single result.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            count (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Count):
+                Count aggregator.
+
+                This field is a member of `oneof`_ ``operator``.
+            alias (str):
+                Optional. Optional name of the property to store the result
+                of the aggregation.
+
+                If not provided, Datastore will pick a default name
+                following the format ``property_<incremental_id>``. For
+                example:
+
+                ::
+
+                   AGGREGATE
+                     COUNT_UP_TO(1) AS count_up_to_1,
+                     COUNT_UP_TO(2),
+                     COUNT_UP_TO(3) AS count_up_to_3,
+                     COUNT_UP_TO(4)
+                   OVER (
+                     ...
+                   );
+
+                becomes:
+
+                ::
+
+                   AGGREGATE
+                     COUNT_UP_TO(1) AS count_up_to_1,
+                     COUNT_UP_TO(2) AS property_1,
+                     COUNT_UP_TO(3) AS count_up_to_3,
+                     COUNT_UP_TO(4) AS property_2
+                   OVER (
+                     ...
+                   );
+
+                Requires:
+
+                -  Must be unique across all aggregation aliases.
+                -  Conform to [entity property
+                   name][google.datastore.v1.Entity.properties] limitations.
+        """
+
+        class Count(proto.Message):
+            r"""Count of entities that match the query.
+ + The ``COUNT(*)`` aggregation function operates on the entire entity + so it does not require a field reference. + + Attributes: + up_to (google.protobuf.wrappers_pb2.Int64Value): + Optional. Optional constraint on the maximum number of + entities to count. + + This provides a way to set an upper bound on the number of + entities to scan, limiting latency and cost. + + Unspecified is interpreted as no bound. + + If a zero value is provided, a count result of zero should + always be expected. + + High-Level Example: + + :: + + AGGREGATE COUNT_UP_TO(1000) OVER ( SELECT * FROM k ); + + Requires: + + - Must be non-negative when present. + """ + + up_to = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.Int64Value, + ) + + count = proto.Field( + proto.MESSAGE, + number=1, + oneof="operator", + message="AggregationQuery.Aggregation.Count", + ) + alias = proto.Field( + proto.STRING, + number=7, + ) + + nested_query = proto.Field( + proto.MESSAGE, + number=1, + oneof="query_type", + message="Query", + ) + aggregations = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Aggregation, + ) + + class KindExpression(proto.Message): r"""A representation of a kind. @@ -305,7 +437,10 @@ class CompositeFilter(proto.Message): The operator for combining multiple filters. filters (Sequence[google.cloud.datastore_v1.types.Filter]): The list of filters to combine. - Must contain at least one filter. + + Requires: + + - At least one filter is present. """ class Operator(proto.Enum): diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 80ff3c444f0a..cce65bdd8066 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -129,7 +129,7 @@ def mypy(session): session.install( "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" ) - session.run("mypy", "google/", "tests/") + session.run("mypy", "google/") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 40f95a2206c8..530adcf2058c 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -279,7 +279,7 @@ def mypy(session): session.install( "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" ) - session.run("mypy", "google/", "tests/") + session.run("mypy", "google/") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 4f5265b60140..82a24a4f9c91 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -39,13 +39,14 @@ def partition( class datastoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'allocate_ids': ('project_id', 'keys', ), - 'begin_transaction': ('project_id', 'transaction_options', ), - 'commit': ('project_id', 'mode', 'transaction', 'mutations', ), - 'lookup': ('project_id', 'keys', 'read_options', ), + 'allocate_ids': ('project_id', 'keys', 'database_id', ), + 'begin_transaction': ('project_id', 'database_id', 'transaction_options', ), + 'commit': ('project_id', 'database_id', 'mode', 'transaction', 'mutations', ), + 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 
'reserve_ids': ('project_id', 'keys', 'database_id', ), - 'rollback': ('project_id', 'transaction', ), - 'run_query': ('project_id', 'partition_id', 'read_options', 'query', 'gql_query', ), + 'rollback': ('project_id', 'transaction', 'database_id', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 4001b3e5dc49..1de1269cf5d7 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,13 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf >= 3.20.2, <5.0.0dev", diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index 5b0708c7532c..868dedf223e1 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.31.5 +google-api-core==1.32.0 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index fd1fc14c4834..13c26fa9ae11 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -14,14 +14,20 @@ # limitations under the License. 
#
 import os
-import mock
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
 
 import grpc
 from grpc.experimental import aio
 import math
 import pytest
 from proto.marshal.rules.dates import DurationRule, TimestampRule
-
+from proto.marshal.rules import wrappers
 
 from google.api_core import client_options
 from google.api_core import exceptions as core_exceptions
@@ -228,6 +234,7 @@ def test_datastore_admin_client_client_options(
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )
 
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -245,6 +252,7 @@
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )
 
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -262,6 +270,7 @@
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )
 
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
@@ -291,6 +300,25 @@
             quota_project_id="octopus",
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
         )
 
 
@@ -358,6 +386,7 @@ def test_datastore_admin_client_mtls_env_auto(
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )
 
     # Check the case ADC client cert is provided. Whether client cert is used depends on
@@ -392,6 +421,7 @@
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )
 
     # Check the case client_cert_source and ADC client cert are not provided.
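The ``api_audience`` client option exercised by the new test above reaches the transport through ``DatastoreAdminClient``. A minimal sketch of the intended usage (assumed, not taken from this patch; the audience URL is illustrative):

.. code-block:: python

    from google.api_core import client_options
    from google.cloud import datastore_admin_v1

    # Hypothetical audience value; the transport defaults to the API
    # host when api_audience is unset.
    options = client_options.ClientOptions(
        api_audience="https://datastore.googleapis.com"
    )
    client = datastore_admin_v1.DatastoreAdminClient(client_options=options)
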
@@ -414,6 +444,7 @@ def test_datastore_admin_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -528,6 +559,7 @@ def test_datastore_admin_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -566,6 +598,7 @@ def test_datastore_admin_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -586,6 +619,7 @@ def test_datastore_admin_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -624,6 +658,7 @@ def test_datastore_admin_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -741,6 +776,67 @@ async def test_export_entities_async_from_dict(): await test_export_entities_async(request_type=dict) +def test_export_entities_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.ExportEntitiesRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_entities_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.ExportEntitiesRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_export_entities_flattened(): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -936,6 +1032,67 @@ async def test_import_entities_async_from_dict(): await test_import_entities_async(request_type=dict) +def test_import_entities_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.ImportEntitiesRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_entities_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.ImportEntitiesRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_import_entities_flattened(): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1131,6 +1288,67 @@ async def test_create_index_async_from_dict(): await test_create_index_async(request_type=dict) +def test_create_index_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.CreateIndexRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
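+    # (kw["metadata"] holds every (key, value) pair passed to the gRPC stub; the
+    # membership assertion below tolerates extra entries such as x-goog-api-client.)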
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_index_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.CreateIndexRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1214,6 +1432,69 @@ async def test_delete_index_async_from_dict(): await test_delete_index_async(request_type=dict) +def test_delete_index_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.DeleteIndexRequest() + + request.project_id = "project_id_value" + request.index_id = "index_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&index_id=index_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_index_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.DeleteIndexRequest() + + request.project_id = "project_id_value" + request.index_id = "index_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&index_id=index_id_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1319,6 +1600,67 @@ async def test_get_index_async_from_dict(): await test_get_index_async(request_type=dict) +def test_get_index_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.GetIndexRequest() + + request.project_id = "project_id_value" + request.index_id = "index_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value = index.Index() + client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&index_id=index_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_index_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.GetIndexRequest() + + request.project_id = "project_id_value" + request.index_id = "index_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) + await client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&index_id=index_id_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1408,6 +1750,67 @@ async def test_list_indexes_async_from_dict(): await test_list_indexes_async(request_type=dict) +def test_list_indexes_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.ListIndexesRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value = datastore_admin.ListIndexesResponse() + client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_indexes_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore_admin.ListIndexesRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore_admin.ListIndexesResponse() + ) + await client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_list_indexes_pager(transport_name: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials, @@ -1446,11 +1849,14 @@ def test_list_indexes_pager(transport_name: str = "grpc"): ) metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), + ) pager = client.list_indexes(request={}) assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, index.Index) for i in results) @@ -1831,6 +2237,28 @@ def test_datastore_admin_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2310,4 +2738,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 4106b21769cd..7448690a5338 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -14,14 +14,20 @@ # limitations under the License. 
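The x-goog-request-params assertions above, and the pager metadata built via gapic_v1.routing_header, both come down to a single helper in google-api-core. A small self-contained demo; the project and database IDs are placeholders:

    from google.api_core.gapic_v1 import routing_header

    # to_grpc_metadata() builds exactly the header tuple the field-header tests assert on.
    md = routing_header.to_grpc_metadata(
        (("project_id", "my-project"), ("database_id", "my-db"))
    )
    print(md)  # ('x-goog-request-params', 'project_id=my-project&database_id=my-db')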
 #
 import os
-import mock
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock

 import grpc
 from grpc.experimental import aio
 import math
 import pytest
 from proto.marshal.rules.dates import DurationRule, TimestampRule
-
+from proto.marshal.rules import wrappers

 from google.api_core import client_options
 from google.api_core import exceptions as core_exceptions
@@ -34,9 +40,11 @@
 from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient
 from google.cloud.datastore_v1.services.datastore import DatastoreClient
 from google.cloud.datastore_v1.services.datastore import transports
+from google.cloud.datastore_v1.types import aggregation_result
 from google.cloud.datastore_v1.types import datastore
 from google.cloud.datastore_v1.types import entity
 from google.cloud.datastore_v1.types import query
+from google.longrunning import operations_pb2
 from google.oauth2 import service_account
 from google.protobuf import struct_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
@@ -214,6 +222,7 @@ def test_datastore_client_client_options(client_class, transport_class, transpor
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )

     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -231,6 +240,7 @@ def test_datastore_client_client_options(client_class, transport_class, transpor
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )

     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -248,6 +258,7 @@ def test_datastore_client_client_options(client_class, transport_class, transpor
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )

     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
@@ -277,6 +288,25 @@ def test_datastore_client_client_options(client_class, transport_class, transpor
             quota_project_id="octopus",
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
         )


@@ -342,6 +372,7 @@ def test_datastore_client_mtls_env_auto(
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )

     # Check the case ADC client cert is provided. Whether client cert is used depends on
@@ -376,6 +407,7 @@ def test_datastore_client_mtls_env_auto(
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
             always_use_jwt_access=True,
+            api_audience=None,
         )

     # Check the case client_cert_source and ADC client cert are not provided.
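The aggregation_result and operations_pb2 imports added above back the new run_aggregation_query RPC and the operations methods exercised later in this file. A hedged sketch of the new call at the GAPIC layer; the IDs are placeholders, field names follow the RunAggregationQueryRequest used in the tests, and the request is left unsent because it would need a reachable backend:

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient(credentials=AnonymousCredentials())
    request = datastore_v1.RunAggregationQueryRequest(
        project_id="my-project",
        database_id="my-db",
    )
    # response = client.run_aggregation_query(request)  # requires a live backend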
@@ -398,6 +430,7 @@ def test_datastore_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -508,6 +541,7 @@ def test_datastore_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -541,6 +575,7 @@ def test_datastore_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -559,6 +594,7 @@ def test_datastore_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -592,6 +628,7 @@ def test_datastore_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -709,6 +746,67 @@ async def test_lookup_async_from_dict(): await test_lookup_async(request_type=dict) +def test_lookup_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.LookupRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value = datastore.LookupResponse() + client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_lookup_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.LookupRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse() + ) + await client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_lookup_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -926,6 +1024,221 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) +def test_run_query_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datastore.RunQueryRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value = datastore.RunQueryResponse() + client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_query_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.RunQueryRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse() + ) + await client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunAggregationQueryRequest, + dict, + ], +) +def test_run_aggregation_query(request_type, transport: str = "grpc"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.RunAggregationQueryResponse() + response = client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunAggregationQueryResponse) + + +def test_run_aggregation_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest() + + +@pytest.mark.asyncio +async def test_run_aggregation_query_async( + transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest +): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse() + ) + response = await client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunAggregationQueryResponse) + + +@pytest.mark.asyncio +async def test_run_aggregation_query_async_from_dict(): + await test_run_aggregation_query_async(request_type=dict) + + +def test_run_aggregation_query_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.RunAggregationQueryRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + call.return_value = datastore.RunAggregationQueryResponse() + client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_aggregation_query_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.RunAggregationQueryRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse() + ) + await client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1021,6 +1334,71 @@ async def test_begin_transaction_async_from_dict(): await test_begin_transaction_async(request_type=dict) +def test_begin_transaction_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.BeginTransactionRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + call.return_value = datastore.BeginTransactionResponse() + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_begin_transaction_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.BeginTransactionRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse() + ) + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_begin_transaction_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1196,29 +1574,90 @@ async def test_commit_async_from_dict(): await test_commit_async(request_type=dict) -def test_commit_flattened(): +def test_commit_field_headers(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.CommitRequest() + + request.project_id = "project_id_value" + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. call.return_value = datastore.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.commit( - project_id="project_id_value", - mode=datastore.CommitRequest.Mode.TRANSACTIONAL, - transaction=b"transaction_blob", - mutations=[ - datastore.Mutation( - insert=entity.Entity( - key=entity.Key( - partition_id=entity.PartitionId( - project_id="project_id_value" - ) - ) + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_commit_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.CommitRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse() + ) + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +def test_commit_flattened(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datastore.CommitResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.commit( + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + transaction=b"transaction_blob", + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) ) ) ], @@ -1443,6 +1882,67 @@ async def test_rollback_async_from_dict(): await test_rollback_async(request_type=dict) +def test_rollback_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.RollbackRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value = datastore.RollbackResponse() + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.RollbackRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_rollback_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1618,6 +2118,67 @@ async def test_allocate_ids_async_from_dict(): await test_allocate_ids_async(request_type=dict) +def test_allocate_ids_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.AllocateIdsRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value = datastore.AllocateIdsResponse() + client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_allocate_ids_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.AllocateIdsRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + await client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_allocate_ids_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1813,6 +2374,67 @@ async def test_reserve_ids_async_from_dict(): await test_reserve_ids_async(request_type=dict) +def test_reserve_ids_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.ReserveIdsRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value = datastore.ReserveIdsResponse() + client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reserve_ids_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datastore.ReserveIdsRequest() + + request.project_id = "project_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + await client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value", + ) in kw["metadata"] + + def test_reserve_ids_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2064,11 +2686,16 @@ def test_datastore_base_transport(): methods = ( "lookup", "run_query", + "run_aggregation_query", "begin_transaction", "commit", "rollback", "allocate_ids", "reserve_ids", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2159,6 +2786,28 @@ def test_datastore_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], +) +def test_datastore_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2533,6 +3182,574 @@ async def test_transport_close_async(): close.assert_called_once() +def test_delete_operation(transport: str = "grpc"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
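+    # (DeleteOperation returns google.protobuf.Empty on the wire, which the
+    # generated surface maps to None.)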
+ assert response is None + + +def test_delete_operation_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", @@ -2593,4 +3810,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) From 2e5d40ee9f5a5660b923b287d8cb853215c133f9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 Oct 2022 15:40:06 +0200 Subject: [PATCH 458/611] chore(deps): update dependency backoff to v2.2.1 (#371) --- .../samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 90b34de7bb0d..1315ece85a43 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" -backoff==2.1.2; python_version >= "3.7" +backoff==2.2.1; python_version >= "3.7" pytest==7.1.3 flaky==3.7.0 From 2d6dc0403728f5ea28b2a964666ac954762b9040 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Oct 2022 13:23:21 -0400 Subject: [PATCH 459/611] fix(deps): allow protobuf 3.19.5 (#372) * fix(deps): allow protobuf 3.19.5 * explicitly exclude protobuf 4.21.0 --- packages/google-cloud-datastore/setup.py | 2 +- packages/google-cloud-datastore/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 1de1269cf5d7..b0e45e9edd08 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -32,7 +32,7 @@ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.20.2, <5.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index 868dedf223e1..5ee7f9eed70a 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -9,4 +9,4 @@ google-api-core==1.32.0 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 -protobuf==3.20.2 +protobuf==3.19.5 From ef86d24436ff88c8d8b6473406fbd3ec81dbb497 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 Oct 2022 15:12:44 +0200 Subject: [PATCH 460/611] chore(deps): update dependency google-cloud-datastore to v2.8.3 (#375) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 2d1b568e6476..09934d289cb6 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ 
b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.2 \ No newline at end of file +google-cloud-datastore==2.8.3 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 8a7a99765cb3..075e3cf24f87 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.2 +google-cloud-datastore==2.8.3 From b8d43b2d05719f51479891659824c9e4b01ed9fb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 18 Oct 2022 16:52:06 -0400 Subject: [PATCH 461/611] chore(main): release 2.9.0 (#370) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 3c02bebce32d..b609fb19f13b 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.9.0](https://github.com/googleapis/python-datastore/compare/v2.8.2...v2.9.0) (2022-10-18) + + +### Features + +* Add datastore aggregation query APIs ([#306](https://github.com/googleapis/python-datastore/issues/306)) ([96d98e5](https://github.com/googleapis/python-datastore/commit/96d98e5204d71dfeb9547052cbdd073292ae4c6a)) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#372](https://github.com/googleapis/python-datastore/issues/372)) ([9305154](https://github.com/googleapis/python-datastore/commit/9305154e8853f9038506c86163dfb4cde36c0d8e)) + ## [2.8.2](https://github.com/googleapis/python-datastore/compare/v2.8.1...v2.8.2) (2022-10-03) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 839a77a1cf5a..b2a8c5535e20 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.8.2" +__version__ = "2.9.0" From d09f6e3a4ef3665071b9d4e185f89e79c1cb571d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 Oct 2022 03:41:55 +0200 Subject: [PATCH 462/611] chore(deps): update dependency google-cloud-datastore to v2.9.0 (#376) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 09934d289cb6..a3caccb020d5 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.3 \ No newline at end of file +google-cloud-datastore==2.9.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 075e3cf24f87..3918f942b816 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.8.3 +google-cloud-datastore==2.9.0 From 1b08000aa989cad2a847d62383c5a05f393e0bc6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 12:46:34 +0200 Subject: [PATCH 463/611] chore(deps): update dependency pytest to v7.2.0 (#377) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 1315ece85a43..b34edda37662 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.1.3 +pytest==7.2.0 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index f97bae64aa54..89cb815c988e 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 \ No newline at end of file +pytest==7.2.0 \ No newline at end of file From 1bbc84fc2cf01956d007ed252ed66ec38e3ad588 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 31 Oct 2022 13:54:11 -0400 Subject: [PATCH 464/611] chore: remove duplicate import line (#379) --- packages/google-cloud-datastore/google/cloud/datastore/entity.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/entity.py b/packages/google-cloud-datastore/google/cloud/datastore/entity.py index b1148865fd4c..e4d72f15c4d7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/entity.py @@ -44,7 +44,6 @@ class Entity(dict): import uuid - from google.cloud import datastore from google.cloud 
import datastore

        unique = str(uuid.uuid4())[0:8]

From f699a4d3675d82d2ce9c68357bf50df632e165b1 Mon Sep 17 00:00:00 2001
From: Mariatta Wijaya
Date: Tue, 1 Nov 2022 10:35:29 -0700
Subject: [PATCH 465/611] feat: Support the Count aggregation query (#368)

* feat: Support the Count aggregation query

* Add `run_aggregation_query` interface to the http client.

Co-authored-by: Owl Bot
Co-authored-by: Anthonios Partheniou
---
 .../docs/aggregations.rst                      |   6 +
 packages/google-cloud-datastore/docs/index.rst |   1 +
 .../google/cloud/datastore/_http.py            |  33 ++
 .../google/cloud/datastore/aggregation.py      | 431 ++++++++++++++++++
 .../google/cloud/datastore/client.py           |  82 ++++
 .../tests/system/test_aggregation_query.py     | 214 +++++++++
 .../tests/unit/test__http.py                   |  92 ++++
 .../tests/unit/test_aggregation.py             | 415 +++++++++++++++++
 .../tests/unit/test_client.py                  |  36 ++
 9 files changed, 1310 insertions(+)
 create mode 100644 packages/google-cloud-datastore/docs/aggregations.rst
 create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/aggregation.py
 create mode 100644 packages/google-cloud-datastore/tests/system/test_aggregation_query.py
 create mode 100644 packages/google-cloud-datastore/tests/unit/test_aggregation.py

diff --git a/packages/google-cloud-datastore/docs/aggregations.rst b/packages/google-cloud-datastore/docs/aggregations.rst
new file mode 100644
index 000000000000..d287fbc59dfc
--- /dev/null
+++ b/packages/google-cloud-datastore/docs/aggregations.rst
@@ -0,0 +1,6 @@
+Aggregations
+~~~~~~~~~~~~
+
+.. automodule:: google.cloud.datastore.aggregation
+  :members:
+  :show-inheritance:
diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst
index 4866c89142ba..890ec56aee80 100644
--- a/packages/google-cloud-datastore/docs/index.rst
+++ b/packages/google-cloud-datastore/docs/index.rst
@@ -18,6 +18,7 @@ API Reference
   entities
   keys
   queries
+  aggregations
   transactions
   batches
   helpers
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
index 60b8af894767..61209e98f870 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py
@@ -280,6 +280,39 @@ def run_query(self, request, retry=None, timeout=None):
             timeout=timeout,
         )
 
+    def run_aggregation_query(self, request, retry=None, timeout=None):
+        """Perform a ``runAggregationQuery`` request.
+
+        :type request: :class:`_datastore_pb2.RunAggregationQueryRequest` or dict
+        :param request:
+            Parameter bundle for API request.
+
+        :type retry: :class:`google.api_core.retry.Retry`
+        :param retry: (Optional) retry policy for the request
+
+        :type timeout: float or tuple(float, float)
+        :param timeout: (Optional) timeout for the request
+
+        :rtype: :class:`.datastore_pb2.RunAggregationQueryResponse`
+        :returns: The returned protobuf response object.
+        """
+        request_pb = _make_request_pb(
+            request, _datastore_pb2.RunAggregationQueryRequest
+        )
+        project_id = request_pb.project_id
+
+        return _rpc(
+            self.client._http,
+            project_id,
+            "runAggregationQuery",
+            self.client._base_url,
+            self.client._client_info,
+            request_pb,
+            _datastore_pb2.RunAggregationQueryResponse,
+            retry=retry,
+            timeout=timeout,
+        )
+
     def begin_transaction(self, request, retry=None, timeout=None):
         """Perform a ``beginTransaction`` request.
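
The ``run_aggregation_query`` helper added above mirrors ``run_query``: it coerces a dict into a ``RunAggregationQueryRequest`` protobuf and POSTs it to the ``runAggregationQuery`` REST endpoint. A minimal sketch of driving it directly, not taken from the patch: the project id and kind are illustrative assumptions, the call performs a live request (so credentials or the emulator are required), and applications would normally use the higher-level ``AggregationQuery`` introduced below instead.

    from google.cloud import datastore
    from google.cloud.datastore import _http
    from google.cloud.datastore_v1.types import query as query_pb2

    client = datastore.Client(project="my-project")  # assumed project id
    ds_api = _http.HTTPDatastoreAPI(client)

    # Build an AggregationQuery proto: COUNT over all "Person" entities.
    aggregation_query_pb = query_pb2.AggregationQuery()
    aggregation_query_pb.nested_query = query_pb2.Query(
        kind=[query_pb2.KindExpression(name="Person")]  # assumed kind
    )
    count_pb = query_pb2.AggregationQuery.Aggregation()
    count_pb.count = query_pb2.AggregationQuery.Aggregation.Count()
    count_pb.alias = "total"
    aggregation_query_pb.aggregations.append(count_pb)

    # Issues the HTTPS request and returns a RunAggregationQueryResponse pb.
    response = ds_api.run_aggregation_query(
        request={
            "project_id": client.project,
            "aggregation_query": aggregation_query_pb,
        }
    )
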
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py
new file mode 100644
index 000000000000..bb75d94ec09b
--- /dev/null
+++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py
@@ -0,0 +1,431 @@
+# # Copyright 2022 Google LLC
+# #
+# # Licensed under the Apache License, Version 2.0 (the "License");
+# # you may not use this file except in compliance with the License.
+# # You may obtain a copy of the License at
+# #
+# #     http://www.apache.org/licenses/LICENSE-2.0
+# #
+# # Unless required by applicable law or agreed to in writing, software
+# # distributed under the License is distributed on an "AS IS" BASIS,
+# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# # See the License for the specific language governing permissions and
+# # limitations under the License.
+#
+# """Create / interact with Google Cloud Datastore aggregation queries."""
+import abc
+from abc import ABC
+
+from google.api_core import page_iterator
+
+from google.cloud.datastore_v1.types import entity as entity_pb2
+from google.cloud.datastore_v1.types import query as query_pb2
+from google.cloud.datastore import helpers
+from google.cloud.datastore.query import _pb_from_query
+
+
+_NOT_FINISHED = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
+_NO_MORE_RESULTS = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS
+
+_FINISHED = (
+    _NO_MORE_RESULTS,
+    query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT,
+    query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_CURSOR,
+)
+
+
+class BaseAggregation(ABC):
+    """
+    Base class representing an Aggregation operation in Datastore
+    """
+
+    @abc.abstractmethod
+    def _to_pb(self):
+        """
+        Convert this instance to the protobuf representation
+        """
+
+
+class CountAggregation(BaseAggregation):
+    """
+    Representation of a "Count" aggregation query.
+
+    :type alias: str
+    :param alias: The alias for the aggregation.
+
+    :type value: int
+    :param value: The resulting value from the aggregation.
+
+    """
+
+    def __init__(self, alias=None):
+        self.alias = alias
+
+    def _to_pb(self):
+        """
+        Convert this instance to the protobuf representation
+        """
+        aggregation_pb = query_pb2.AggregationQuery.Aggregation()
+        aggregation_pb.count = query_pb2.AggregationQuery.Aggregation.Count()
+        aggregation_pb.alias = self.alias
+        return aggregation_pb
+
+
+class AggregationResult(object):
+    """
+    A class representing a result from an Aggregation Query
+
+    :type alias: str
+    :param alias: The alias for the aggregation.
+
+    :type value: int
+    :param value: The resulting value from the aggregation.
+
+    """
+
+    def __init__(self, alias, value):
+        self.alias = alias
+        self.value = value
+
+    def __repr__(self):
+        return "<AggregationResult(alias=%s, value=%s)>" % (self.alias, self.value)
+
+
+class AggregationQuery(object):
+    """An Aggregation query against the Cloud Datastore.
+
+    This class serves as an abstraction for creating aggregations over a query
+    in the Cloud Datastore.
+
+    :type client: :class:`google.cloud.datastore.client.Client`
+    :param client: The client used to connect to Datastore.
+
+    :type query: :class:`google.cloud.datastore.query.Query`
+    :param query: The query used for aggregations.
+    """
+
+    def __init__(
+        self,
+        client,
+        query,
+    ):
+
+        self._client = client
+        self._nested_query = query
+        self._aggregations = []
+
+    @property
+    def project(self):
+        """Get the project for this AggregationQuery.
+
+        :rtype: str
+        :returns: The project for the query.
+        """
+        return self._nested_query._project or self._client.project
+
+    @property
+    def namespace(self):
+        """The nested query's namespace
+
+        :rtype: str or None
+        :returns: the namespace assigned to this query
+        """
+        return self._nested_query._namespace or self._client.namespace
+
+    def _to_pb(self):
+        """
+        Returns the protobuf representation for this Aggregation Query
+        """
+        pb = query_pb2.AggregationQuery()
+        pb.nested_query = _pb_from_query(self._nested_query)
+        for aggregation in self._aggregations:
+            aggregation_pb = aggregation._to_pb()
+            pb.aggregations.append(aggregation_pb)
+        return pb
+
+    def count(self, alias=None):
+        """
+        Adds a count over the nested query
+
+        :type alias: str
+        :param alias: (Optional) The alias for the count
+        """
+        count_aggregation = CountAggregation(alias=alias)
+        self._aggregations.append(count_aggregation)
+        return self
+
+    def add_aggregation(self, aggregation):
+        """
+        Adds an aggregation operation to the nested query
+
+        :type aggregation: :class:`google.cloud.datastore.aggregation.BaseAggregation`
+        :param aggregation: An aggregation operation, e.g. a CountAggregation
+        """
+        self._aggregations.append(aggregation)
+
+    def add_aggregations(self, aggregations):
+        """
+        Adds a list of aggregations to the nested query
+
+        :type aggregations: list
+        :param aggregations: a list of aggregation operations
+        """
+        self._aggregations.extend(aggregations)
+
+    def fetch(
+        self,
+        client=None,
+        eventual=False,
+        retry=None,
+        timeout=None,
+        read_time=None,
+    ):
+        """Execute the Aggregation Query; return an iterator for the aggregation results.
+
+        For example:
+
+        .. testsetup:: aggregation-query-fetch
+
+            import uuid
+
+            from google.cloud import datastore
+
+            unique = str(uuid.uuid4())[0:8]
+            client = datastore.Client(namespace='ns{}'.format(unique))
+
+
+        .. doctest:: aggregation-query-fetch
+
+            >>> andy = datastore.Entity(client.key('Person', 1234))
+            >>> andy['name'] = 'Andy'
+            >>> sally = datastore.Entity(client.key('Person', 2345))
+            >>> sally['name'] = 'Sally'
+            >>> bobby = datastore.Entity(client.key('Person', 3456))
+            >>> bobby['name'] = 'Bobby'
+            >>> client.put_multi([andy, sally, bobby])
+            >>> query = client.query(kind='Person')
+            >>> aggregation_query = client.aggregation_query(query)
+            >>> result = aggregation_query.count(alias="total").fetch()
+            >>> result
+            <google.cloud.datastore.aggregation.AggregationResultIterator object at ...>
+
+        .. testcleanup:: aggregation-query-fetch
+
+            client.delete(andy.key)
+
+        :type client: :class:`google.cloud.datastore.client.Client`
+        :param client: (Optional) client used to connect to datastore.
+                       If not supplied, uses the query's value.
+
+        :type eventual: bool
+        :param eventual: (Optional) Defaults to strongly consistent (False).
+                         Setting True will use eventual consistency,
+                         but cannot be used inside a transaction or
+                         with read_time, otherwise will raise
+                         ValueError.
+
+        :type retry: :class:`google.api_core.retry.Retry`
+        :param retry:
+            A retry object used to retry requests. If ``None`` is specified,
+            requests will be retried using a default configuration.
+
+        :type timeout: float
+        :param timeout:
+            Time, in seconds, to wait for the request to complete.
+            Note that if ``retry`` is specified, the timeout applies
+            to each individual attempt.
+
+        :type read_time: datetime
+        :param read_time:
+            (Optional) use read_time read consistency, cannot be used inside a
+            transaction or with eventual consistency, or will raise ValueError.
+
+        :rtype: :class:`AggregationResultIterator`
+        :returns: The iterator for the aggregation query.
+        """
+        if client is None:
+            client = self._client
+
+        return AggregationResultIterator(
+            self,
+            client,
+            eventual=eventual,
+            retry=retry,
+            timeout=timeout,
+            read_time=read_time,
+        )
+
+
+class AggregationResultIterator(page_iterator.Iterator):
+    """Represent the state of a given execution of an AggregationQuery.
+
+    :type aggregation_query: :class:`~google.cloud.datastore.aggregation.AggregationQuery`
+    :param aggregation_query: AggregationQuery object holding permanent configuration (i.e.
+                              things that don't change with each page in
+                              a results set).
+
+    :type client: :class:`~google.cloud.datastore.client.Client`
+    :param client: The client used to make a request.
+
+    :type eventual: bool
+    :param eventual: (Optional) Defaults to strongly consistent (False).
+                     Setting True will use eventual consistency,
+                     but cannot be used inside a transaction or
+                     with read_time, otherwise will raise ValueError.
+
+    :type retry: :class:`google.api_core.retry.Retry`
+    :param retry:
+        A retry object used to retry requests. If ``None`` is specified,
+        requests will be retried using a default configuration.
+
+    :type timeout: float
+    :param timeout:
+        Time, in seconds, to wait for the request to complete.
+        Note that if ``retry`` is specified, the timeout applies
+        to each individual attempt.
+
+    :type read_time: datetime
+    :param read_time: (Optional) Runs the query with read time consistency.
+                      Cannot be used with eventual consistency or inside a
+                      transaction, otherwise will raise ValueError. This feature is in private preview.
+    """
+
+    def __init__(
+        self,
+        aggregation_query,
+        client,
+        eventual=False,
+        retry=None,
+        timeout=None,
+        read_time=None,
+    ):
+        super(AggregationResultIterator, self).__init__(
+            client=client,
+            item_to_value=_item_to_aggregation_result,
+        )
+
+        self._aggregation_query = aggregation_query
+        self._eventual = eventual
+        self._retry = retry
+        self._timeout = timeout
+        self._read_time = read_time
+        # The attributes below will change over the life of the iterator.
+        self._more_results = True
+
+    def _build_protobuf(self):
+        """Build a query protobuf.
+
+        Relies on the current state of the iterator.
+
+        :rtype:
+            :class:`.query_pb2.AggregationQuery`
+        :returns: The aggregation_query protobuf object for the current
+                  state of the iterator.
+        """
+        pb = self._aggregation_query._to_pb()
+        return pb
+
+    def _process_query_results(self, response_pb):
+        """Process the response from a datastore aggregation query.
+
+        :type response_pb: :class:`.datastore_pb2.RunAggregationQueryResponse`
+        :param response_pb: The protobuf response from a ``runAggregationQuery``
+                            request.
+
+        :rtype: iterable
+        :returns: The next page of aggregation results.
+        :raises ValueError: If ``more_results`` is an unexpected value.
+        """
+
+        if response_pb.batch.more_results == _NOT_FINISHED:
+            self._more_results = True
+        elif response_pb.batch.more_results in _FINISHED:
+            self._more_results = False
+        else:
+            raise ValueError("Unexpected value returned for `more_results`.")
+
+        return [
+            result.aggregate_properties
+            for result in response_pb.batch.aggregation_results
+        ]
+
+    def _next_page(self):
+        """Get the next page in the iterator.
+
+        :rtype: :class:`~google.cloud.iterator.Page`
+        :returns: The next page in the iterator (or :data:`None` if
+                  there are no pages left).
+        """
+        if not self._more_results:
+            return None
+
+        query_pb = self._build_protobuf()
+        transaction = self.client.current_transaction
+        if transaction is None:
+            transaction_id = None
+        else:
+            transaction_id = transaction.id
+        read_options = helpers.get_read_options(
+            self._eventual, transaction_id, self._read_time
+        )
+
+        partition_id = entity_pb2.PartitionId(
+            project_id=self._aggregation_query.project,
+            namespace_id=self._aggregation_query.namespace,
+        )
+
+        kwargs = {}
+
+        if self._retry is not None:
+            kwargs["retry"] = self._retry
+
+        if self._timeout is not None:
+            kwargs["timeout"] = self._timeout
+
+        response_pb = self.client._datastore_api.run_aggregation_query(
+            request={
+                "project_id": self._aggregation_query.project,
+                "partition_id": partition_id,
+                "read_options": read_options,
+                "aggregation_query": query_pb,
+            },
+            **kwargs,
+        )
+
+        while response_pb.batch.more_results == _NOT_FINISHED:
+            # We haven't finished processing. A likely reason is we haven't
+            # skipped all of the results yet. Don't return any results.
+            # Instead, rerun query, adjusting offsets. Datastore doesn't process
+            # more than 1000 skipped results in a query.
+            old_query_pb = query_pb
+            query_pb = query_pb2.AggregationQuery()
+            query_pb._pb.CopyFrom(old_query_pb._pb)  # copy for testability
+
+            response_pb = self.client._datastore_api.run_aggregation_query(
+                request={
+                    "project_id": self._aggregation_query.project,
+                    "partition_id": partition_id,
+                    "read_options": read_options,
+                    "aggregation_query": query_pb,
+                },
+                **kwargs,
+            )
+
+        item_pbs = self._process_query_results(response_pb)
+        return page_iterator.Page(self, item_pbs, self.item_to_value)
+
+
+# pylint: disable=unused-argument
+def _item_to_aggregation_result(iterator, pb):
+    """Convert a raw protobuf aggregation result to the native object.
+
+    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
+    :param iterator: The iterator that is currently in use.
+
+    :type pb:
+        :class:`proto.marshal.collections.maps.MapComposite`
+    :param pb: The aggregation properties pb from the aggregation query result
+
+    :rtype: list of :class:`google.cloud.datastore.aggregation.AggregationResult`
+    :returns: The list of AggregationResults
+    """
+    results = [AggregationResult(alias=k, value=pb[k].integer_value) for k in pb.keys()]
+    return results
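
Putting the new module together, a minimal usage sketch (the kind and aliases below are illustrative assumptions, and the client assumes default credentials or the emulator):

    from google.cloud import datastore
    from google.cloud.datastore.aggregation import AggregationQuery, CountAggregation

    client = datastore.Client()
    query = client.query(kind="Person")  # assumed kind

    # count() appends a CountAggregation and returns the AggregationQuery, so
    # calls chain; add_aggregation()/add_aggregations() are the explicit forms.
    aggregation_query = AggregationQuery(client, query).count(alias="total")
    aggregation_query.add_aggregation(CountAggregation(alias="all"))

    # fetch() returns an AggregationResultIterator; each item it yields is a
    # list of AggregationResult objects, one per requested alias.
    for result_row in aggregation_query.fetch():
        for aggregation in result_row:
            print(aggregation.alias, aggregation.value)
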
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py
index 212ba1d4cbff..e90a3415c5ac 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/client.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py
@@ -28,6 +28,8 @@
 from google.cloud.datastore.entity import Entity
 from google.cloud.datastore.key import Key
 from google.cloud.datastore.query import Query
+from google.cloud.datastore.aggregation import AggregationQuery
+
 from google.cloud.datastore.transaction import Transaction
 
 try:
@@ -837,6 +839,86 @@ def do_something_with(entity):
         kwargs["namespace"] = self.namespace
         return Query(self, **kwargs)
 
+    def aggregation_query(self, query):
+        """Proxy to :class:`google.cloud.datastore.aggregation.AggregationQuery`.
+
+        Using aggregation_query to count over a query:
+
+        .. testsetup:: aggregation_query
+
+            import uuid
+
+            from google.cloud import datastore
+            from google.cloud.datastore.aggregation import CountAggregation
+
+            unique = str(uuid.uuid4())[0:8]
+            client = datastore.Client(namespace='ns{}'.format(unique))
+
+            def do_something_with(entity):
+                pass
+
+        .. doctest:: aggregation_query
+
+            >>> query = client.query(kind='MyKind')
+            >>> aggregation_query = client.aggregation_query(query)
+            >>> aggregation_query.count(alias='total')
+            <google.cloud.datastore.aggregation.AggregationQuery object at ...>
+            >>> aggregation_query.fetch()
+            <google.cloud.datastore.aggregation.AggregationResultIterator object at ...>
+
+        Adding an aggregation to the aggregation_query
+
+        .. doctest:: aggregation_query
+
+            >>> query = client.query(kind='MyKind')
+            >>> aggregation_query.add_aggregation(CountAggregation(alias='total'))
+            >>> aggregation_query.fetch()
+            <google.cloud.datastore.aggregation.AggregationResultIterator object at ...>
+
+        Adding multiple aggregations to the aggregation_query
+
+        .. doctest:: aggregation_query
+
+            >>> query = client.query(kind='MyKind')
+            >>> total_count = CountAggregation(alias='total')
+            >>> all_count = CountAggregation(alias='all')
+            >>> aggregation_query.add_aggregations([total_count, all_count])
+            >>> aggregation_query.fetch()
+            <google.cloud.datastore.aggregation.AggregationResultIterator object at ...>
+
+
+        Using the aggregation_query iterator
+
+        .. doctest:: aggregation_query
+
+            >>> query = client.query(kind='MyKind')
+            >>> aggregation_query = client.aggregation_query(query)
+            >>> aggregation_query.count(alias='total')
+            <google.cloud.datastore.aggregation.AggregationQuery object at ...>
+            >>> aggregation_query_iter = aggregation_query.fetch()
+            >>> for aggregation_result in aggregation_query_iter:
+            ...     do_something_with(aggregation_result)
+
+        or manually page through results
+
+        .. doctest:: aggregation_query
+
+            >>> aggregation_query_iter = aggregation_query.fetch()
+            >>> pages = aggregation_query_iter.pages
+            >>>
+            >>> first_page = next(pages)
+            >>> first_page_entities = list(first_page)
+            >>> aggregation_query_iter.next_page_token is None
+            True
+
+        :type query: :class:`google.cloud.datastore.query.Query`
+        :param query: The query used for the aggregation.
+
+        :rtype: :class:`~google.cloud.datastore.aggregation.AggregationQuery`
+        :returns: An AggregationQuery object.
+        """
+        return AggregationQuery(self, query)
+
     def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None):
         """Reserve a list of IDs sequentially from a complete key.
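
The system tests that follow count over filtered queries; the equivalent application-level pattern, sketched here with an assumed kind, property, and threshold, is roughly:

    from google.cloud import datastore

    client = datastore.Client()  # assumes default credentials or the emulator

    # The filter lives on the nested query; the server evaluates the COUNT
    # without returning the matching entities themselves.
    query = client.query(kind="Character")     # assumed kind
    query.add_filter("appearances", ">=", 20)  # assumed property/threshold

    aggregation_query = client.aggregation_query(query)
    aggregation_query.count(alias="total")
    for result_row in aggregation_query.fetch():
        for aggregation in result_row:
            print(aggregation.alias, aggregation.value)
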
diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py
new file mode 100644
index 000000000000..3e5120da9bdf
--- /dev/null
+++ b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py
@@ -0,0 +1,214 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import exceptions
+from test_utils.retry import RetryErrors
+
+from .utils import clear_datastore
+from .utils import populate_datastore
+from . 
import _helpers + + +retry_503 = RetryErrors(exceptions.ServiceUnavailable) + + +def _make_iterator(aggregation_query, **kw): + # Do retry for errors raised during initial API call + return retry_503(aggregation_query.fetch)(**kw) + + +def _pull_iterator(aggregation_query, **kw): + return list(_make_iterator(aggregation_query, **kw)) + + +def _do_fetch(aggregation_query, **kw): + # Do retry for errors raised during iteration + return retry_503(_pull_iterator)(aggregation_query, **kw) + + +@pytest.fixture(scope="session") +def aggregation_query_client(datastore_client): + return _helpers.clone_client(datastore_client, namespace=None) + + +@pytest.fixture(scope="session") +def ancestor_key(aggregation_query_client, in_emulator): + + # In the emulator, re-populating the datastore is cheap. + if in_emulator: + populate_datastore.add_characters(client=aggregation_query_client) + + ancestor_key = aggregation_query_client.key(*populate_datastore.ANCESTOR) + + yield ancestor_key + + # In the emulator, destroy the query entities. + if in_emulator: + clear_datastore.remove_all_entities(client=aggregation_query_client) + + +def _make_query(aggregation_query_client, ancestor_key): + return aggregation_query_client.query(kind="Character", ancestor=ancestor_key) + + +@pytest.fixture(scope="function") +def nested_query(aggregation_query_client, ancestor_key): + return _make_query(aggregation_query_client, ancestor_key) + + +def test_aggregation_query_default(aggregation_query_client, nested_query): + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count() + result = _do_fetch(aggregation_query) + assert len(result) == 1 + for r in result[0]: + assert r.alias == "property_1" + assert r.value == 8 + + +def test_aggregation_query_with_alias(aggregation_query_client, nested_query): + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total") + result = _do_fetch(aggregation_query) + assert len(result) == 1 + for r in result[0]: + assert r.alias == "total" + assert r.value > 0 + + +def test_aggregation_query_multiple_aggregations( + aggregation_query_client, nested_query +): + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total") + aggregation_query.count(alias="all") + result = _do_fetch(aggregation_query) + assert len(result) == 1 + for r in result[0]: + assert r.alias in ["all", "total"] + assert r.value > 0 + + +def test_aggregation_query_add_aggregation(aggregation_query_client, nested_query): + from google.cloud.datastore.aggregation import CountAggregation + + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + count_aggregation = CountAggregation(alias="total") + aggregation_query.add_aggregation(count_aggregation) + result = _do_fetch(aggregation_query) + assert len(result) == 1 + for r in result[0]: + assert r.alias == "total" + assert r.value > 0 + + +def test_aggregation_query_add_aggregations(aggregation_query_client, nested_query): + from google.cloud.datastore.aggregation import CountAggregation + + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + count_aggregation_1 = CountAggregation(alias="total") + count_aggregation_2 = CountAggregation(alias="all") + aggregation_query.add_aggregations([count_aggregation_1, count_aggregation_2]) + result = _do_fetch(aggregation_query) + assert 
len(result) == 1 + for r in result[0]: + assert r.alias in ["total", "all"] + assert r.value > 0 + + +def test_aggregation_query_add_aggregations_duplicated_alias( + aggregation_query_client, nested_query +): + from google.cloud.datastore.aggregation import CountAggregation + from google.api_core.exceptions import BadRequest + + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + count_aggregation_1 = CountAggregation(alias="total") + count_aggregation_2 = CountAggregation(alias="total") + aggregation_query.add_aggregations([count_aggregation_1, count_aggregation_2]) + with pytest.raises(BadRequest): + _do_fetch(aggregation_query) + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.add_aggregation(count_aggregation_1) + aggregation_query.add_aggregation(count_aggregation_2) + with pytest.raises(BadRequest): + _do_fetch(aggregation_query) + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total") + aggregation_query.count(alias="total") + with pytest.raises(BadRequest): + _do_fetch(aggregation_query) + + +def test_aggregation_query_with_nested_query_filtered( + aggregation_query_client, nested_query +): + query = nested_query + + query.add_filter("appearances", ">=", 20) + expected_matches = 6 + + # We expect 6, but allow the query to get 1 extra. + entities = _do_fetch(query, limit=expected_matches + 1) + + assert len(entities) == expected_matches + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total") + result = _do_fetch(aggregation_query) + assert len(result) == 1 + + for r in result[0]: + assert r.alias == "total" + assert r.value == 6 + + +def test_aggregation_query_with_nested_query_multiple_filters( + aggregation_query_client, nested_query +): + query = nested_query + + query.add_filter("appearances", ">=", 26) + query = query.add_filter("family", "=", "Stark") + expected_matches = 4 + + # We expect 4, but allow the query to get 1 extra. 
+ entities = _do_fetch(query, limit=expected_matches + 1) + + assert len(entities) == expected_matches + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total") + result = _do_fetch(aggregation_query) + assert len(result) == 1 + + for r in result[0]: + assert r.alias == "total" + assert r.value == 4 diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index a03397d5098e..f9e0a29f7bc9 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -557,6 +557,98 @@ def test_api_run_query_w_namespace_nonempty_result(): _run_query_helper(namespace=namespace, found=1) +def _run_aggregation_query_helper( + transaction=None, + retry=None, + timeout=None, +): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore_v1.types import aggregation_result + + project = "PROJECT" + kind = "Nonesuch" + query_pb = query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)]) + + aggregation_query_pb = query_pb2.AggregationQuery() + aggregation_query_pb.nested_query = query_pb + count_aggregation = query_pb2.AggregationQuery.Aggregation() + count_aggregation.alias = "total" + aggregation_query_pb.aggregations.append(count_aggregation) + partition_kw = {"project_id": project} + + partition_id = entity_pb2.PartitionId(**partition_kw) + + options_kw = {} + + if transaction is not None: + options_kw["transaction"] = transaction + read_options = datastore_pb2.ReadOptions(**options_kw) + + batch_kw = { + "more_results": query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, + } + rsp_pb = datastore_pb2.RunAggregationQueryResponse( + batch=aggregation_result.AggregationResultBatch(**batch_kw) + ) + + http = _make_requests_session( + [_make_response(content=rsp_pb._pb.SerializeToString())] + ) + client_info = _make_client_info() + client = mock.Mock( + _http=http, + _base_url="test.invalid", + _client_info=client_info, + spec=["_http", "_base_url", "_client_info"], + ) + ds_api = _make_http_datastore_api(client) + request = { + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "aggregation_query": aggregation_query_pb, + } + kwargs = _retry_timeout_kw(retry, timeout, http) + + response = ds_api.run_aggregation_query(request=request, **kwargs) + + assert response == rsp_pb._pb + + uri = _build_expected_url(client._base_url, project, "runAggregationQuery") + request = _verify_protobuf_call( + http, + uri, + datastore_pb2.RunAggregationQueryRequest(), + retry=retry, + timeout=timeout, + ) + + assert request.partition_id == partition_id._pb + assert request.aggregation_query == aggregation_query_pb._pb + assert request.read_options == read_options._pb + + +def test_api_run_aggregation_query_simple(): + _run_aggregation_query_helper() + + +def test_api_run_aggregation_query_w_retry(): + retry = mock.MagicMock() + _run_aggregation_query_helper(retry=retry) + + +def test_api_run_aggregation_query_w_timeout(): + timeout = 5.0 + _run_aggregation_query_helper(timeout=timeout) + + +def test_api_run_aggregation_query_w_transaction(): + transaction = b"TRANSACTION" + _run_aggregation_query_helper(transaction=transaction) + + def _begin_transaction_helper(options=None, retry=None, timeout=None): from 
google.cloud.datastore_v1.types import datastore as datastore_pb2 diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py new file mode 100644 index 000000000000..8b28a908facc --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py @@ -0,0 +1,415 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock +import pytest + +from google.cloud.datastore.aggregation import CountAggregation, AggregationQuery + +from tests.unit.test_query import _make_query, _make_client + +_PROJECT = "PROJECT" + + +def test_count_aggregation_to_pb(): + from google.cloud.datastore_v1.types import query as query_pb2 + + count_aggregation = CountAggregation(alias="total") + + expected_aggregation_query_pb = query_pb2.AggregationQuery.Aggregation() + expected_aggregation_query_pb.count = query_pb2.AggregationQuery.Aggregation.Count() + expected_aggregation_query_pb.alias = count_aggregation.alias + assert count_aggregation._to_pb() == expected_aggregation_query_pb + + +@pytest.fixture +def client(): + return _make_client() + + +def test_pb_over_query(client): + from google.cloud.datastore.query import _pb_from_query + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert pb.aggregations == [] + + +def test_pb_over_query_with_count(client): + from google.cloud.datastore.query import _pb_from_query + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + aggregation_query.count(alias="total") + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 1 + assert pb.aggregations[0] == CountAggregation(alias="total")._to_pb() + + +def test_pb_over_query_with_add_aggregation(client): + from google.cloud.datastore.query import _pb_from_query + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + aggregation_query.add_aggregation(CountAggregation(alias="total")) + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 1 + assert pb.aggregations[0] == CountAggregation(alias="total")._to_pb() + + +def test_pb_over_query_with_add_aggregations(client): + from google.cloud.datastore.query import _pb_from_query + + aggregations = [ + CountAggregation(alias="total"), + CountAggregation(alias="all"), + ] + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + aggregation_query.add_aggregations(aggregations) + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 2 + assert pb.aggregations[0] == CountAggregation(alias="total")._to_pb() + assert pb.aggregations[1] == CountAggregation(alias="all")._to_pb() + + +def 
test_query_fetch_defaults_w_client_attr(client): + from google.cloud.datastore.aggregation import AggregationResultIterator + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + iterator = aggregation_query.fetch() + + assert isinstance(iterator, AggregationResultIterator) + assert iterator._aggregation_query is aggregation_query + assert iterator.client is client + assert iterator._retry is None + assert iterator._timeout is None + + +def test_query_fetch_w_explicit_client_w_retry_w_timeout(client): + from google.cloud.datastore.aggregation import AggregationResultIterator + + other_client = _make_client() + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + retry = mock.Mock() + timeout = 100000 + + iterator = aggregation_query.fetch( + client=other_client, retry=retry, timeout=timeout + ) + + assert isinstance(iterator, AggregationResultIterator) + assert iterator._aggregation_query is aggregation_query + assert iterator.client is other_client + assert iterator._retry == retry + assert iterator._timeout == timeout + + +def test_iterator_constructor_defaults(): + query = object() + client = object() + aggregation_query = AggregationQuery(client=client, query=query) + iterator = _make_aggregation_iterator(aggregation_query, client) + + assert not iterator._started + assert iterator.client is client + assert iterator.page_number == 0 + assert iterator.num_results == 0 + assert iterator._aggregation_query is aggregation_query + assert iterator._more_results + assert iterator._retry is None + assert iterator._timeout is None + + +def test_iterator_constructor_explicit(): + query = object() + client = object() + aggregation_query = AggregationQuery(client=client, query=query) + retry = mock.Mock() + timeout = 100000 + + iterator = _make_aggregation_iterator( + aggregation_query, + client, + retry=retry, + timeout=timeout, + ) + + assert not iterator._started + assert iterator.client is client + assert iterator.page_number == 0 + assert iterator.num_results == 0 + assert iterator._aggregation_query is aggregation_query + assert iterator._more_results + assert iterator._retry == retry + assert iterator._timeout == timeout + + +def test_iterator__build_protobuf_empty(): + from google.cloud.datastore_v1.types import query as query_pb2 + + client = _Client(None) + query = _make_query(client) + aggregation_query = AggregationQuery(client=client, query=query) + iterator = _make_aggregation_iterator(aggregation_query, client) + + pb = iterator._build_protobuf() + expected_pb = query_pb2.AggregationQuery() + expected_pb.nested_query = query_pb2.Query() + assert pb == expected_pb + + +def test_iterator__build_protobuf_all_values(): + from google.cloud.datastore_v1.types import query as query_pb2 + + client = _Client(None) + query = _make_query(client) + aggregation_query = AggregationQuery(client=client, query=query) + + iterator = _make_aggregation_iterator(aggregation_query, client) + iterator.num_results = 4 + + pb = iterator._build_protobuf() + expected_pb = query_pb2.AggregationQuery() + expected_pb.nested_query = query_pb2.Query() + assert pb == expected_pb + + +def test_iterator__process_query_results(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.aggregation import AggregationResult + + iterator = _make_aggregation_iterator(None, None) + + aggregation_pbs = [AggregationResult(alias="total", value=1)] + + more_results_enum = 
query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + response_pb = _make_aggregation_query_response(aggregation_pbs, more_results_enum) + result = iterator._process_query_results(response_pb) + assert result == [ + r.aggregate_properties for r in response_pb.batch.aggregation_results + ] + assert iterator._more_results + + +def test_iterator__process_query_results_finished_result(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.aggregation import AggregationResult + + iterator = _make_aggregation_iterator(None, None) + + aggregation_pbs = [AggregationResult(alias="total", value=1)] + + more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS + response_pb = _make_aggregation_query_response(aggregation_pbs, more_results_enum) + result = iterator._process_query_results(response_pb) + assert result == [ + r.aggregate_properties for r in response_pb.batch.aggregation_results + ] + assert iterator._more_results is False + + +def test_iterator__process_query_results_unexpected_result(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.aggregation import AggregationResult + + iterator = _make_aggregation_iterator(None, None) + + aggregation_pbs = [AggregationResult(alias="total", value=1)] + + more_results_enum = ( + query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_TYPE_UNSPECIFIED + ) + response_pb = _make_aggregation_query_response(aggregation_pbs, more_results_enum) + with pytest.raises(ValueError): + iterator._process_query_results(response_pb) + + +def test_aggregation_iterator__next_page(): + _next_page_helper() + + +def test_iterator__next_page_w_retry(): + retry = mock.Mock() + _next_page_helper(retry=retry) + + +def test_iterator__next_page_w_timeout(): + _next_page_helper(timeout=100000) + + +def test_iterator__next_page_in_transaction(): + txn_id = b"1xo1md\xe2\x98\x83" + _next_page_helper(txn_id=txn_id) + + +def test_iterator__next_page_no_more(): + from google.cloud.datastore.query import Query + + ds_api = _make_datastore_api_for_aggregation() + client = _Client(None, datastore_api=ds_api) + query = Query(client) + + iterator = _make_aggregation_iterator(query, client) + iterator._more_results = False + page = iterator._next_page() + assert page is None + ds_api.run_aggregation_query.assert_not_called() + + +def _next_page_helper(txn_id=None, retry=None, timeout=None): + from google.api_core import page_iterator + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.aggregation import AggregationResult + + more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED + aggregation_pbs = [AggregationResult(alias="total", value=1)] + + result_1 = _make_aggregation_query_response([], more_enum) + result_2 = _make_aggregation_query_response( + aggregation_pbs, query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS + ) + + project = "prujekt" + ds_api = _make_datastore_api_for_aggregation(result_1, result_2) + if txn_id is None: + client = _Client(project, datastore_api=ds_api) + else: + transaction = mock.Mock(id=txn_id, spec=["id"]) + client = _Client(project, datastore_api=ds_api, transaction=transaction) + + query = _make_query(client) + kwargs = {} + + if retry is not None: + kwargs["retry"] = retry + + if timeout is not None: + kwargs["timeout"] = timeout + + it_kwargs 
= kwargs.copy() # so it doesn't get overwritten later + + aggregation_query = AggregationQuery(client=client, query=query) + + iterator = _make_aggregation_iterator(aggregation_query, client, **it_kwargs) + page = iterator._next_page() + + assert isinstance(page, page_iterator.Page) + assert page._parent is iterator + + partition_id = entity_pb2.PartitionId(project_id=project) + if txn_id is not None: + read_options = datastore_pb2.ReadOptions(transaction=txn_id) + else: + read_options = datastore_pb2.ReadOptions() + + aggregation_query = AggregationQuery(client=client, query=query) + assert ds_api.run_aggregation_query.call_count == 2 + expected_call = mock.call( + request={ + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "aggregation_query": aggregation_query._to_pb(), + }, + **kwargs + ) + assert ds_api.run_aggregation_query.call_args_list == ( + [expected_call, expected_call] + ) + + +def test__item_to_aggregation_result(): + from google.cloud.datastore.aggregation import _item_to_aggregation_result + from google.cloud.datastore.aggregation import AggregationResult + + with mock.patch( + "proto.marshal.collections.maps.MapComposite" + ) as map_composite_mock: + map_composite_mock.keys.return_value = {"total": {"integer_value": 1}} + + result = _item_to_aggregation_result(None, map_composite_mock) + + assert len(result) == 1 + assert type(result[0]) == AggregationResult + + assert result[0].alias == "total" + assert result[0].value == map_composite_mock.__getitem__().integer_value + + +class _Client(object): + def __init__(self, project, datastore_api=None, namespace=None, transaction=None): + self.project = project + self._datastore_api = datastore_api + self.namespace = namespace + self._transaction = transaction + + @property + def current_transaction(self): + return self._transaction + + +def _make_aggregation_query(*args, **kw): + from google.cloud.datastore.aggregation import AggregationQuery + + return AggregationQuery(*args, **kw) + + +def _make_aggregation_iterator(*args, **kw): + from google.cloud.datastore.aggregation import AggregationResultIterator + + return AggregationResultIterator(*args, **kw) + + +def _make_aggregation_query_response(aggregation_pbs, more_results_enum): + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore_v1.types import aggregation_result + + aggregation_results = [] + for aggr in aggregation_pbs: + result = aggregation_result.AggregationResult() + result.aggregate_properties.alias = aggr.alias + result.aggregate_properties.value = aggr.value + aggregation_results.append(result) + + return datastore_pb2.RunAggregationQueryResponse( + batch=aggregation_result.AggregationResultBatch( + aggregation_results=aggregation_results, + more_results=more_results_enum, + ) + ) + + +def _make_datastore_api_for_aggregation(*results): + if len(results) == 0: + run_aggregation_query = mock.Mock(return_value=None, spec=[]) + else: + run_aggregation_query = mock.Mock(side_effect=results, spec=[]) + + return mock.Mock( + run_aggregation_query=run_aggregation_query, spec=["run_aggregation_query"] + ) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 2a15677a9646..3e35f74e4d17 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -1515,6 +1515,42 @@ def test_client_query_w_namespace_collision(): ) +def 
test_client_aggregation_query_w_defaults(): + creds = _make_credentials() + client = _make_client(credentials=creds) + query = client.query() + patch = mock.patch( + "google.cloud.datastore.client.AggregationQuery", spec=["__call__"] + ) + with patch as mock_klass: + aggregation_query = client.aggregation_query(query=query) + assert aggregation_query is mock_klass.return_value + mock_klass.assert_called_once_with(client, query) + + +def test_client_aggregation_query_w_namespace(): + namespace = object() + + creds = _make_credentials() + client = _make_client(namespace=namespace, credentials=creds) + query = client.query() + + aggregation_query = client.aggregation_query(query=query) + assert aggregation_query.namespace == namespace + + +def test_client_aggregation_query_w_namespace_collision(): + namespace1 = object() + namespace2 = object() + + creds = _make_credentials() + client = _make_client(namespace=namespace1, credentials=creds) + query = client.query(namespace=namespace2) + + aggregation_query = client.aggregation_query(query=query) + assert aggregation_query.namespace == namespace2 + + def test_client_reserve_ids_multi_w_partial_key(): incomplete_key = _Key(_Key.kind, None) creds = _make_credentials() From 72652aeaab710a579412e8a276bb9b598660dbed Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Nov 2022 12:22:42 -0700 Subject: [PATCH 466/611] chore(main): release 2.10.0 (#380) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index b609fb19f13b..d4e6bd77eea3 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.10.0](https://github.com/googleapis/python-datastore/compare/v2.9.0...v2.10.0) (2022-11-01) + + +### Features + +* Support the Count aggregation query ([#368](https://github.com/googleapis/python-datastore/issues/368)) ([b400a9a](https://github.com/googleapis/python-datastore/commit/b400a9ac0d8f0c0da22fe92c2c229e2e90e21007)) + ## [2.9.0](https://github.com/googleapis/python-datastore/compare/v2.8.2...v2.9.0) (2022-10-18) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index b2a8c5535e20..13e710fccca2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.9.0" +__version__ = "2.10.0" From 646e55df502290e11dd24780a814ccd4ffb787b5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Nov 2022 20:53:05 +0100 Subject: [PATCH 467/611] chore(deps): update dependency google-cloud-datastore to v2.10.0 (#381) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index a3caccb020d5..bc05d8eea4a4 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.9.0 \ No newline at end of file +google-cloud-datastore==2.10.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 3918f942b816..3d3dda713fcb 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.9.0 +google-cloud-datastore==2.10.0 From 2f745e1271787f0a056d16bd717b28324289a589 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 17 Nov 2022 11:51:40 -0800 Subject: [PATCH 468/611] chore(python): update release script dependencies (#386) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): update release script dependencies Source-Link: https://github.com/googleapis/synthtool/commit/25083af347468dd5f90f69627420f7d452b6c50e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 * Remove unneeded replacement * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Replace DEFAULT_PYTHON_VERSION with "3.9" * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Mariatta Wijaya --- .../.github/.OwlBot.lock.yaml | 2 +- .../.github/workflows/docs.yml | 4 +- .../.github/workflows/lint.yml | 2 +- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/docker/docs/Dockerfile | 12 +- .../.kokoro/requirements.in | 4 +- .../.kokoro/requirements.txt | 354 ++++++++++-------- packages/google-cloud-datastore/noxfile.py | 17 +- packages/google-cloud-datastore/owlbot.py | 6 +- 9 files changed, 222 insertions(+), 181 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 3815c983cb16..3f1ccc085ef7 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index 7092a139aed3..e97d89e484c9 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml index d2aee5b7d8ec..16d5a9e90f6d 100644 --- a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index 87ade4d54362..23000c05d9d8 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index 238b87b9d1c9..f8137d0ae497 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/packages/google-cloud-datastore/.kokoro/requirements.in b/packages/google-cloud-datastore/.kokoro/requirements.in index 7718391a34d7..cbd7e77f44db 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.in +++ b/packages/google-cloud-datastore/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt 
b/packages/google-cloud-datastore/.kokoro/requirements.txt index d15994bac93c..9c1b9be34e6b 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + 
--hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + 
--hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - 
--hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + 
--hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - 
--hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via 
twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + 
--hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -377,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + 
--hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index cce65bdd8066..9f36cbeb61a9 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -291,12 +291,16 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -313,7 +317,7 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def doctests(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
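The hunks above and below replace DEFAULT_PYTHON_VERSION with a hard-coded "3.9" for the documentation sessions. As a standalone illustration of the pinned-session pattern — not the repository's exact noxfile; the sphinx-build arguments here are simplified:

    import nox

    @nox.session(python="3.9")  # nox provisions a Python 3.9 virtualenv
    def docs(session):
        session.install("-e", ".")
        session.install("sphinx==4.0.1", "alabaster", "recommonmark")
        # Simplified invocation; the real session passes additional flags.
        session.run("sphinx-build", "-b", "html", "docs", "docs/_build/html")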
@@ -324,13 +328,16 @@ def doctests(session): session.run("py.test", "tests/doctests.py") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 530adcf2058c..47450e9e363a 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -179,11 +179,11 @@ def system(session, disable_grpc): assert 1 == s.replace( "noxfile.py", r"""\ -@nox.session\(python=DEFAULT_PYTHON_VERSION\) +@nox.session\(python="3.9"\) def docfx\(session\): """, """\ -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def doctests(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. @@ -194,7 +194,7 @@ def doctests(session): session.run("py.test", "tests/doctests.py") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """, ) From ef155c7726b2e1aafcb5224892cfbe4dc27b4520 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 24 Nov 2022 16:34:30 -0800 Subject: [PATCH 469/611] chore(python): drop flake8-import-order in samples noxfile (#387) Source-Link: https://github.com/googleapis/synthtool/commit/6ed3a831cb9ff69ef8a504c353e098ec0192ad93 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../samples/snippets/noxfile.py | 26 +++---------------- .../snippets/schedule-export/noxfile.py | 26 +++---------------- 3 files changed, 7 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 3f1ccc085ef7..bb21147e4c23 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 + digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index 0398d72ff690..f5c32b22789b 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. 
- """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. # # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py index 0398d72ff690..f5c32b22789b 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
# # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) From 639122e9644992b116bf5de2de5243a35ab909e7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 24 Nov 2022 16:45:59 -0800 Subject: [PATCH 470/611] chore: Update gapic-generator-python to v1.6.1 (#378) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 feat: new APIs added to reflect updates to the filestore service - Add ENTERPRISE Tier - Add snapshot APIs: RevertInstance, ListSnapshots, CreateSnapshot, DeleteSnapshot, UpdateSnapshot - Add multi-share APIs: ListShares, GetShare, CreateShare, DeleteShare, UpdateShare - Add ConnectMode to NetworkConfig (for Private Service Access support) - New status codes (SUSPENDED/SUSPENDING, REVERTING/RESUMING) - Add SuspensionReason (for KMS related suspension) - Add new fields to Instance information: max_capacity_gb, capacity_step_size_gb, max_share_count, capacity_gb, multi_share_enabled PiperOrigin-RevId: 487492758 Source-Link: https://github.com/googleapis/googleapis/commit/5be5981f50322cf0c7388595e0f31ac5d0693469 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab0e217f560cc2c1afc11441c2eab6b6950efd2b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWIwZTIxN2Y1NjBjYzJjMWFmYzExNDQxYzJlYWI2YjY5NTBlZmQyYiJ9 chore: Update to gapic-generator-python 1.6.0 feat(python): Add typing to proto.Message based class attributes feat(python): Snippetgen handling of repeated enum field PiperOrigin-RevId: 487326846 Source-Link: https://github.com/googleapis/googleapis/commit/da380c77bb87ba0f752baf07605dd1db30e1f7e1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ef5762ee6731a0cbbfea22fd0eecee51ab1c8e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlZjU3NjJlZTY3MzFhMGNiYmZlYTIyZmQwZWVjZWU1MWFiMWM4ZSJ9 chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add gapic_version.py * Add release-please config and manifest. Update gapic_version and setup py. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Rename release-please-config.json * add generated code for google/cloud/datastore_admin * configure release please to use manifest * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * configure release please to use manifest Co-authored-by: Owl Bot Co-authored-by: Mariatta Wijaya Co-authored-by: Anthonios Partheniou Co-authored-by: Mariatta Wijaya --- .../.github/release-please.yml | 1 + .../.release-please-manifest.json | 3 + .../google/cloud/datastore/gapic_version.py | 15 ++ .../google/cloud/datastore_admin/__init__.py | 76 +++++++++ .../cloud/datastore_admin/gapic_version.py | 16 ++ .../google/cloud/datastore_admin/py.typed | 2 + .../cloud/datastore_admin_v1/__init__.py | 4 + .../services/datastore_admin/async_client.py | 72 +++++---- .../services/datastore_admin/client.py | 66 ++++---- .../datastore_admin/transports/base.py | 2 +- .../datastore_admin/transports/grpc.py | 20 +-- .../transports/grpc_asyncio.py | 16 +- .../types/datastore_admin.py | 104 +++++++------ .../cloud/datastore_admin_v1/types/index.py | 20 +-- .../datastore_admin_v1/types/migration.py | 14 +- .../google/cloud/datastore_v1/__init__.py | 4 + .../services/datastore/async_client.py | 116 +++++++------- .../datastore_v1/services/datastore/client.py | 106 +++++++------ .../services/datastore/transports/base.py | 2 +- .../services/datastore/transports/grpc.py | 20 +-- .../datastore/transports/grpc_asyncio.py | 16 +- .../datastore_v1/types/aggregation_result.py | 14 +- .../cloud/datastore_v1/types/datastore.py | 146 +++++++++--------- .../google/cloud/datastore_v1/types/entity.py | 56 +++---- .../google/cloud/datastore_v1/types/query.py | 108 ++++++------- packages/google-cloud-datastore/owlbot.py | 3 +- .../release-please-config.json | 20 +++ packages/google-cloud-datastore/setup.py | 2 +- .../test_datastore_admin.py | 1 + 29 files changed, 623 insertions(+), 422 deletions(-) create mode 100644 packages/google-cloud-datastore/.release-please-manifest.json create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed create mode 100644 packages/google-cloud-datastore/release-please-config.json diff --git a/packages/google-cloud-datastore/.github/release-please.yml b/packages/google-cloud-datastore/.github/release-please.yml index 29601ad4692c..fe749ff6b15d 100644 --- a/packages/google-cloud-datastore/.github/release-please.yml +++ b/packages/google-cloud-datastore/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git 
a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json new file mode 100644 index 000000000000..a2cc302edb7b --- /dev/null +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "2.10.0" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py new file mode 100644 index 000000000000..60f4adfc31c5 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -0,0 +1,15 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "2.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py new file mode 100644 index 000000000000..6e5cd0cd98b8 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.datastore_admin import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.datastore_admin_v1.services.datastore_admin.client import ( + DatastoreAdminClient, +) +from google.cloud.datastore_admin_v1.services.datastore_admin.async_client import ( + DatastoreAdminAsyncClient, +) + +from google.cloud.datastore_admin_v1.types.datastore_admin import CommonMetadata +from google.cloud.datastore_admin_v1.types.datastore_admin import CreateIndexRequest +from google.cloud.datastore_admin_v1.types.datastore_admin import ( + DatastoreFirestoreMigrationMetadata, +) +from google.cloud.datastore_admin_v1.types.datastore_admin import DeleteIndexRequest +from google.cloud.datastore_admin_v1.types.datastore_admin import EntityFilter +from google.cloud.datastore_admin_v1.types.datastore_admin import ExportEntitiesMetadata +from google.cloud.datastore_admin_v1.types.datastore_admin import ExportEntitiesRequest +from google.cloud.datastore_admin_v1.types.datastore_admin import ExportEntitiesResponse +from google.cloud.datastore_admin_v1.types.datastore_admin import GetIndexRequest +from google.cloud.datastore_admin_v1.types.datastore_admin import ImportEntitiesMetadata +from google.cloud.datastore_admin_v1.types.datastore_admin import ImportEntitiesRequest +from google.cloud.datastore_admin_v1.types.datastore_admin import IndexOperationMetadata +from google.cloud.datastore_admin_v1.types.datastore_admin import ListIndexesRequest +from google.cloud.datastore_admin_v1.types.datastore_admin import ListIndexesResponse +from google.cloud.datastore_admin_v1.types.datastore_admin import Progress +from google.cloud.datastore_admin_v1.types.datastore_admin import OperationType +from google.cloud.datastore_admin_v1.types.index import Index +from google.cloud.datastore_admin_v1.types.migration import MigrationProgressEvent +from google.cloud.datastore_admin_v1.types.migration import MigrationStateEvent +from google.cloud.datastore_admin_v1.types.migration import MigrationState +from google.cloud.datastore_admin_v1.types.migration import MigrationStep + +__all__ = ( + "DatastoreAdminClient", + "DatastoreAdminAsyncClient", + "CommonMetadata", + "CreateIndexRequest", + "DatastoreFirestoreMigrationMetadata", + "DeleteIndexRequest", + "EntityFilter", + "ExportEntitiesMetadata", + "ExportEntitiesRequest", + "ExportEntitiesResponse", + "GetIndexRequest", + "ImportEntitiesMetadata", + "ImportEntitiesRequest", + "IndexOperationMetadata", + "ListIndexesRequest", + "ListIndexesResponse", + "Progress", + "OperationType", + "Index", + "MigrationProgressEvent", + "MigrationStateEvent", + "MigrationState", + "MigrationStep", +) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py new file mode 100644 index 000000000000..00f0a8d0705e --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed b/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed new file mode 100644 index 000000000000..dc48a544f252 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datastore-admin package uses inline types. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index 4d0164cf72bd..4b0552ded8f5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.datastore_admin import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.datastore_admin import DatastoreAdminClient from .services.datastore_admin import DatastoreAdminAsyncClient diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 2a7e46c48516..bd96febe7643 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core.client_options import ClientOptions @@ -218,9 +228,9 @@ def transport(self) -> DatastoreAdminTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the datastore admin client. 
@@ -264,14 +274,14 @@ def __init__( async def export_entities( self, - request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, + request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, *, - project_id: str = None, - labels: Mapping[str, str] = None, - entity_filter: datastore_admin.EntityFilter = None, - output_url_prefix: str = None, + project_id: Optional[str] = None, + labels: Optional[MutableMapping[str, str]] = None, + entity_filter: Optional[datastore_admin.EntityFilter] = None, + output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Exports a copy of all or a subset of entities from @@ -317,7 +327,7 @@ async def sample_export_entities(): print(response) Args: - request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): + request (Optional[Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. project_id (:class:`str`): @@ -327,7 +337,7 @@ async def sample_export_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Mapping[str, str]`): + labels (:class:`MutableMapping[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -445,14 +455,14 @@ async def sample_export_entities(): async def import_entities( self, - request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, + request: Optional[Union[datastore_admin.ImportEntitiesRequest, dict]] = None, *, - project_id: str = None, - labels: Mapping[str, str] = None, - input_url: str = None, - entity_filter: datastore_admin.EntityFilter = None, + project_id: Optional[str] = None, + labels: Optional[MutableMapping[str, str]] = None, + input_url: Optional[str] = None, + entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Imports entities into Google Cloud Datastore. @@ -495,7 +505,7 @@ async def sample_import_entities(): print(response) Args: - request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): + request (Optional[Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. project_id (:class:`str`): @@ -505,7 +515,7 @@ async def sample_import_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Mapping[str, str]`): + labels (:class:`MutableMapping[str, str]`): Client-assigned labels. 
This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -624,10 +634,10 @@ async def sample_import_entities(): async def create_index( self, - request: Union[datastore_admin.CreateIndexRequest, dict] = None, + request: Optional[Union[datastore_admin.CreateIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates the specified index. A newly created index's initial @@ -676,7 +686,7 @@ async def sample_create_index(): print(response) Args: - request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): + request (Optional[Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -734,10 +744,10 @@ async def sample_create_index(): async def delete_index( self, - request: Union[datastore_admin.DeleteIndexRequest, dict] = None, + request: Optional[Union[datastore_admin.DeleteIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes an existing index. An index can only be deleted if it is @@ -785,7 +795,7 @@ async def sample_delete_index(): print(response) Args: - request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): + request (Optional[Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -846,10 +856,10 @@ async def sample_delete_index(): async def get_index( self, - request: Union[datastore_admin.GetIndexRequest, dict] = None, + request: Optional[Union[datastore_admin.GetIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> index.Index: r"""Gets an index. @@ -880,7 +890,7 @@ async def sample_get_index(): print(response) Args: - request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): + request (Optional[Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -938,10 +948,10 @@ async def sample_get_index(): async def list_indexes( self, - request: Union[datastore_admin.ListIndexesRequest, dict] = None, + request: Optional[Union[datastore_admin.ListIndexesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListIndexesAsyncPager: r"""Lists the indexes that match the specified filters. 
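
The docstring updates above track a second annotation change: proto-plus map and repeated fields are mutable containers, so ``Mapping``/``Sequence`` give way to ``MutableMapping``/``MutableSequence``; the read-only protocols would make a type checker reject ordinary in-place mutation. A hedged sketch of the distinction (project, bucket, kind, and label values are placeholders):

    from google.cloud.datastore_admin_v1 import types

    request = types.ExportEntitiesRequest(
        project_id="my-project",
        output_url_prefix="gs://my-bucket/exports",
        entity_filter=types.EntityFilter(kinds=["Task"]),
    )
    # Both mutations are legal at runtime, and with the mutable protocols
    # they now pass the type checker as well.
    request.labels["requested-by"] = "nightly-backup"
    request.entity_filter.kinds.append("TaskList")
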
@@ -976,7 +986,7 @@ async def sample_list_indexes(): print(response) Args: - request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): + request (Optional[Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 201a69e8ebb8..4c00fb94b52e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import pkg_resources from google.api_core import client_options as client_options_lib @@ -61,7 +72,7 @@ class DatastoreAdminClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[DatastoreAdminTransport]: """Returns an appropriate transport class. @@ -369,8 +380,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreAdminTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, DatastoreAdminTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the datastore admin client. @@ -384,7 +395,7 @@ def __init__( transport (Union[str, DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -414,6 +425,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -466,14 +478,14 @@ def __init__( def export_entities( self, - request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, + request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, *, - project_id: str = None, - labels: Mapping[str, str] = None, - entity_filter: datastore_admin.EntityFilter = None, - output_url_prefix: str = None, + project_id: Optional[str] = None, + labels: Optional[MutableMapping[str, str]] = None, + entity_filter: Optional[datastore_admin.EntityFilter] = None, + output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Exports a copy of all or a subset of entities from @@ -529,7 +541,7 @@ def sample_export_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -646,14 +658,14 @@ def sample_export_entities(): def import_entities( self, - request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, + request: Optional[Union[datastore_admin.ImportEntitiesRequest, dict]] = None, *, - project_id: str = None, - labels: Mapping[str, str] = None, - input_url: str = None, - entity_filter: datastore_admin.EntityFilter = None, + project_id: Optional[str] = None, + labels: Optional[MutableMapping[str, str]] = None, + input_url: Optional[str] = None, + entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Imports entities into Google Cloud Datastore. @@ -706,7 +718,7 @@ def sample_import_entities(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -824,10 +836,10 @@ def sample_import_entities(): def create_index( self, - request: Union[datastore_admin.CreateIndexRequest, dict] = None, + request: Optional[Union[datastore_admin.CreateIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates the specified index. 
A newly created index's initial @@ -935,10 +947,10 @@ def sample_create_index(): def delete_index( self, - request: Union[datastore_admin.DeleteIndexRequest, dict] = None, + request: Optional[Union[datastore_admin.DeleteIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes an existing index. An index can only be deleted if it is @@ -1048,10 +1060,10 @@ def sample_delete_index(): def get_index( self, - request: Union[datastore_admin.GetIndexRequest, dict] = None, + request: Optional[Union[datastore_admin.GetIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> index.Index: r"""Gets an index. @@ -1131,10 +1143,10 @@ def sample_get_index(): def list_indexes( self, - request: Union[datastore_admin.ListIndexesRequest, dict] = None, + request: Optional[Union[datastore_admin.ListIndexesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListIndexesPager: r"""Lists the indexes that match the specified filters. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index cf9fd9506b81..23175bd2e4f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -54,7 +54,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index f15228fab95a..2eac944f3680 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -107,14 +107,14 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] 
= None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -242,8 +242,8 @@ def __init__( def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index b5ec28142eae..d0fc1d60c99b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -109,7 +109,7 @@ class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -152,15 +152,15 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 1bffdf3da635..f7e3adcf62a4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.datastore_admin_v1.types import index as gda_index @@ -64,7 +66,7 @@ class CommonMetadata(proto.Message): operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. 
- labels (Mapping[str, str]): + labels (MutableMapping[str, str]): The client-assigned labels which were provided when the operation was created. May also include additional labels. @@ -83,27 +85,27 @@ class State(proto.Enum): FAILED = 6 CANCELLED = 7 - start_time = proto.Field( + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - operation_type = proto.Field( + operation_type: "OperationType" = proto.Field( proto.ENUM, number=3, enum="OperationType", ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=4, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=5, enum=State, @@ -123,11 +125,11 @@ class Progress(proto.Message): unavailable. """ - work_completed = proto.Field( + work_completed: int = proto.Field( proto.INT64, number=1, ) - work_estimated = proto.Field( + work_estimated: int = proto.Field( proto.INT64, number=2, ) @@ -141,7 +143,7 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Client-assigned labels. entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is @@ -171,21 +173,21 @@ class ExportEntitiesRequest(proto.Message): without conflict. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - entity_filter = proto.Field( + entity_filter: "EntityFilter" = proto.Field( proto.MESSAGE, number=3, message="EntityFilter", ) - output_url_prefix = proto.Field( + output_url_prefix: str = proto.Field( proto.STRING, number=4, ) @@ -199,7 +201,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage @@ -225,20 +227,20 @@ class ImportEntitiesRequest(proto.Message): specified then all entities from the export are imported. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - input_url = proto.Field( + input_url: str = proto.Field( proto.STRING, number=3, ) - entity_filter = proto.Field( + entity_filter: "EntityFilter" = proto.Field( proto.MESSAGE, number=4, message="EntityFilter", @@ -258,7 +260,7 @@ class ExportEntitiesResponse(proto.Message): Only present if the operation completed successfully. """ - output_url = proto.Field( + output_url: str = proto.Field( proto.STRING, number=1, ) @@ -287,27 +289,27 @@ class ExportEntitiesMetadata(proto.Message): [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. 
""" - common = proto.Field( + common: "CommonMetadata" = proto.Field( proto.MESSAGE, number=1, message="CommonMetadata", ) - progress_entities = proto.Field( + progress_entities: "Progress" = proto.Field( proto.MESSAGE, number=2, message="Progress", ) - progress_bytes = proto.Field( + progress_bytes: "Progress" = proto.Field( proto.MESSAGE, number=3, message="Progress", ) - entity_filter = proto.Field( + entity_filter: "EntityFilter" = proto.Field( proto.MESSAGE, number=4, message="EntityFilter", ) - output_url_prefix = proto.Field( + output_url_prefix: str = proto.Field( proto.STRING, number=5, ) @@ -335,27 +337,27 @@ class ImportEntitiesMetadata(proto.Message): field. """ - common = proto.Field( + common: "CommonMetadata" = proto.Field( proto.MESSAGE, number=1, message="CommonMetadata", ) - progress_entities = proto.Field( + progress_entities: "Progress" = proto.Field( proto.MESSAGE, number=2, message="Progress", ) - progress_bytes = proto.Field( + progress_bytes: "Progress" = proto.Field( proto.MESSAGE, number=3, message="Progress", ) - entity_filter = proto.Field( + entity_filter: "EntityFilter" = proto.Field( proto.MESSAGE, number=4, message="EntityFilter", ) - input_url = proto.Field( + input_url: str = proto.Field( proto.STRING, number=5, ) @@ -380,9 +382,9 @@ class EntityFilter(proto.Message): The entire Baz namespace: kinds=[], namespace_ids=['Baz'] Attributes: - kinds (Sequence[str]): + kinds (MutableSequence[str]): If empty, then this represents all kinds. - namespace_ids (Sequence[str]): + namespace_ids (MutableSequence[str]): An empty list represents all namespaces. This is the preferred usage for projects that don't use namespaces. @@ -393,11 +395,11 @@ class EntityFilter(proto.Message): Each namespace in this list must be unique. """ - kinds = proto.RepeatedField( + kinds: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - namespace_ids = proto.RepeatedField( + namespace_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) @@ -417,11 +419,11 @@ class CreateIndexRequest(proto.Message): deleted. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - index = proto.Field( + index: gda_index.Index = proto.Field( proto.MESSAGE, number=3, message=gda_index.Index, @@ -439,11 +441,11 @@ class DeleteIndexRequest(proto.Message): The resource ID of the index to delete. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - index_id = proto.Field( + index_id: str = proto.Field( proto.STRING, number=3, ) @@ -460,11 +462,11 @@ class GetIndexRequest(proto.Message): The resource ID of the index to get. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - index_id = proto.Field( + index_id: str = proto.Field( proto.STRING, number=3, ) @@ -487,19 +489,19 @@ class ListIndexesRequest(proto.Message): request, if any. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=3, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=4, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=5, ) @@ -510,7 +512,7 @@ class ListIndexesResponse(proto.Message): [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. 
Attributes: - indexes (Sequence[google.cloud.datastore_admin_v1.types.Index]): + indexes (MutableSequence[google.cloud.datastore_admin_v1.types.Index]): The indexes. next_page_token (str): The standard List next-page token. @@ -520,12 +522,12 @@ class ListIndexesResponse(proto.Message): def raw_page(self): return self - indexes = proto.RepeatedField( + indexes: MutableSequence[gda_index.Index] = proto.RepeatedField( proto.MESSAGE, number=1, message=gda_index.Index, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -546,17 +548,17 @@ class IndexOperationMetadata(proto.Message): acting on. """ - common = proto.Field( + common: "CommonMetadata" = proto.Field( proto.MESSAGE, number=1, message="CommonMetadata", ) - progress_entities = proto.Field( + progress_entities: "Progress" = proto.Field( proto.MESSAGE, number=2, message="Progress", ) - index_id = proto.Field( + index_id: str = proto.Field( proto.STRING, number=3, ) @@ -581,12 +583,12 @@ class DatastoreFirestoreMigrationMetadata(proto.Message): Datastore to Cloud Firestore in Datastore mode. """ - migration_state = proto.Field( + migration_state: migration.MigrationState = proto.Field( proto.ENUM, number=1, enum=migration.MigrationState, ) - migration_step = proto.Field( + migration_step: migration.MigrationStep = proto.Field( proto.ENUM, number=2, enum=migration.MigrationStep, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index e00c3bb70800..8192d81cd7e2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore @@ -38,7 +40,7 @@ class Index(proto.Message): ancestor (google.cloud.datastore_admin_v1.types.Index.AncestorMode): Required. The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. - properties (Sequence[google.cloud.datastore_admin_v1.types.Index.IndexedProperty]): + properties (MutableSequence[google.cloud.datastore_admin_v1.types.Index.IndexedProperty]): Required. An ordered sequence of property names and their index attributes. state (google.cloud.datastore_admin_v1.types.Index.State): @@ -78,39 +80,39 @@ class IndexedProperty(proto.Message): DIRECTION_UNSPECIFIED. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - direction = proto.Field( + direction: "Index.Direction" = proto.Field( proto.ENUM, number=2, enum="Index.Direction", ) - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=1, ) - index_id = proto.Field( + index_id: str = proto.Field( proto.STRING, number=3, ) - kind = proto.Field( + kind: str = proto.Field( proto.STRING, number=4, ) - ancestor = proto.Field( + ancestor: AncestorMode = proto.Field( proto.ENUM, number=5, enum=AncestorMode, ) - properties = proto.RepeatedField( + properties: MutableSequence[IndexedProperty] = proto.RepeatedField( proto.MESSAGE, number=6, message=IndexedProperty, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=7, enum=State, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index cbd4f4f71fae..95f25f4253fd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore @@ -57,7 +59,7 @@ class MigrationStateEvent(proto.Message): The new state of the migration. """ - state = proto.Field( + state: "MigrationState" = proto.Field( proto.ENUM, number=1, enum="MigrationState", @@ -109,7 +111,7 @@ class PrepareStepDetails(proto.Message): the ``REDIRECT_WRITES`` step. """ - concurrency_mode = proto.Field( + concurrency_mode: "MigrationProgressEvent.ConcurrencyMode" = proto.Field( proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", @@ -123,24 +125,24 @@ class RedirectWritesStepDetails(proto.Message): Ths concurrency mode for this database. """ - concurrency_mode = proto.Field( + concurrency_mode: "MigrationProgressEvent.ConcurrencyMode" = proto.Field( proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", ) - step = proto.Field( + step: "MigrationStep" = proto.Field( proto.ENUM, number=1, enum="MigrationStep", ) - prepare_step_details = proto.Field( + prepare_step_details: PrepareStepDetails = proto.Field( proto.MESSAGE, number=2, oneof="step_details", message=PrepareStepDetails, ) - redirect_writes_step_details = proto.Field( + redirect_writes_step_details: RedirectWritesStepDetails = proto.Field( proto.MESSAGE, number=3, oneof="step_details", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 83e7464d0960..b868c4063564 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.datastore import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.datastore import DatastoreClient from .services.datastore import DatastoreAsyncClient diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index c6e6f2dff143..1986c2b5ef6c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core.client_options import ClientOptions @@ -161,9 +171,9 @@ def transport(self) -> DatastoreTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DatastoreTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the datastore client. @@ -207,13 +217,13 @@ def __init__( async def lookup( self, - request: Union[datastore.LookupRequest, dict] = None, + request: Optional[Union[datastore.LookupRequest, dict]] = None, *, - project_id: str = None, - read_options: datastore.ReadOptions = None, - keys: Sequence[entity.Key] = None, + project_id: Optional[str] = None, + read_options: Optional[datastore.ReadOptions] = None, + keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -245,7 +255,7 @@ async def sample_lookup(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.LookupRequest, dict]]): The request object. The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. project_id (:class:`str`): @@ -260,7 +270,7 @@ async def sample_lookup(): This corresponds to the ``read_options`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[google.cloud.datastore_v1.types.Key]`): + keys (:class:`MutableSequence[google.cloud.datastore_v1.types.Key]`): Required. Keys of entities to look up. @@ -339,10 +349,10 @@ async def sample_lookup(): async def run_query( self, - request: Union[datastore.RunQueryRequest, dict] = None, + request: Optional[Union[datastore.RunQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -374,7 +384,7 @@ async def sample_run_query(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]]): The request object. 
The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -431,10 +441,10 @@ async def sample_run_query(): async def run_aggregation_query( self, - request: Union[datastore.RunAggregationQueryRequest, dict] = None, + request: Optional[Union[datastore.RunAggregationQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -466,7 +476,7 @@ async def sample_run_aggregation_query(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]]): The request object. The request for [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -523,11 +533,11 @@ async def sample_run_aggregation_query(): async def begin_transaction( self, - request: Union[datastore.BeginTransactionRequest, dict] = None, + request: Optional[Union[datastore.BeginTransactionRequest, dict]] = None, *, - project_id: str = None, + project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. @@ -559,7 +569,7 @@ async def sample_begin_transaction(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]]): The request object. The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. project_id (:class:`str`): @@ -627,14 +637,14 @@ async def sample_begin_transaction(): async def commit( self, - request: Union[datastore.CommitRequest, dict] = None, + request: Optional[Union[datastore.CommitRequest, dict]] = None, *, - project_id: str = None, - mode: datastore.CommitRequest.Mode = None, - transaction: bytes = None, - mutations: Sequence[datastore.Mutation] = None, + project_id: Optional[str] = None, + mode: Optional[datastore.CommitRequest.Mode] = None, + transaction: Optional[bytes] = None, + mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting @@ -668,7 +678,7 @@ async def sample_commit(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.CommitRequest, dict]]): The request object. The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. project_id (:class:`str`): @@ -694,7 +704,7 @@ async def sample_commit(): This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - mutations (:class:`Sequence[google.cloud.datastore_v1.types.Mutation]`): + mutations (:class:`MutableSequence[google.cloud.datastore_v1.types.Mutation]`): The mutations to perform. 
When mode is ``TRANSACTIONAL``, mutations affecting a @@ -777,12 +787,12 @@ async def sample_commit(): async def rollback( self, - request: Union[datastore.RollbackRequest, dict] = None, + request: Optional[Union[datastore.RollbackRequest, dict]] = None, *, - project_id: str = None, - transaction: bytes = None, + project_id: Optional[str] = None, + transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -815,7 +825,7 @@ async def sample_rollback(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.RollbackRequest, dict]]): The request object. The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. project_id (:class:`str`): @@ -893,12 +903,12 @@ async def sample_rollback(): async def allocate_ids( self, - request: Union[datastore.AllocateIdsRequest, dict] = None, + request: Optional[Union[datastore.AllocateIdsRequest, dict]] = None, *, - project_id: str = None, - keys: Sequence[entity.Key] = None, + project_id: Optional[str] = None, + keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for @@ -931,7 +941,7 @@ async def sample_allocate_ids(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]]): The request object. The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. project_id (:class:`str`): @@ -941,7 +951,7 @@ async def sample_allocate_ids(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (:class:`Sequence[google.cloud.datastore_v1.types.Key]`): + keys (:class:`MutableSequence[google.cloud.datastore_v1.types.Key]`): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be @@ -1010,12 +1020,12 @@ async def sample_allocate_ids(): async def reserve_ids( self, - request: Union[datastore.ReserveIdsRequest, dict] = None, + request: Optional[Union[datastore.ReserveIdsRequest, dict]] = None, *, - project_id: str = None, - keys: Sequence[entity.Key] = None, + project_id: Optional[str] = None, + keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being @@ -1048,7 +1058,7 @@ async def sample_reserve_ids(): print(response) Args: - request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): + request (Optional[Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]]): The request object. The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. project_id (:class:`str`): @@ -1058,7 +1068,7 @@ async def sample_reserve_ids(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- keys (:class:`Sequence[google.cloud.datastore_v1.types.Key]`): + keys (:class:`MutableSequence[google.cloud.datastore_v1.types.Key]`): Required. A list of keys with complete key paths whose numeric IDs should not be auto-allocated. @@ -1136,10 +1146,10 @@ async def sample_reserve_ids(): async def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1190,10 +1200,10 @@ async def list_operations( async def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1244,10 +1254,10 @@ async def get_operation( async def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1299,10 +1309,10 @@ async def delete_operation( async def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 4d0b8f3471a3..5d7446208f89 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import pkg_resources from google.api_core import client_options as client_options_lib @@ -59,7 +70,7 @@ class DatastoreClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[DatastoreTransport]: """Returns an appropriate transport class. @@ -319,8 +330,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, DatastoreTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the datastore client. 
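
Above, the flattened ``keys`` and ``mutations`` parameters become ``Optional[MutableSequence[...]]``; a plain list still satisfies them, since ``list`` implements ``MutableSequence``. A usage sketch, assuming Application Default Credentials are configured (project and key names invented):

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()
    key = datastore_v1.Key(
        partition_id=datastore_v1.PartitionId(project_id="my-project"),
        path=[datastore_v1.Key.PathElement(kind="Task", name="sample-task")],
    )
    # A list is a MutableSequence, so callers pass keys exactly as before.
    response = client.lookup(project_id="my-project", keys=[key])
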
@@ -334,7 +345,7 @@ def __init__( transport (Union[str, DatastoreTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -364,6 +375,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -416,13 +428,13 @@ def __init__( def lookup( self, - request: Union[datastore.LookupRequest, dict] = None, + request: Optional[Union[datastore.LookupRequest, dict]] = None, *, - project_id: str = None, - read_options: datastore.ReadOptions = None, - keys: Sequence[entity.Key] = None, + project_id: Optional[str] = None, + read_options: Optional[datastore.ReadOptions] = None, + keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -469,7 +481,7 @@ def sample_lookup(): This corresponds to the ``read_options`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. Keys of entities to look up. @@ -538,10 +550,10 @@ def sample_lookup(): def run_query( self, - request: Union[datastore.RunQueryRequest, dict] = None, + request: Optional[Union[datastore.RunQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -621,10 +633,10 @@ def sample_run_query(): def run_aggregation_query( self, - request: Union[datastore.RunAggregationQueryRequest, dict] = None, + request: Optional[Union[datastore.RunAggregationQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -704,11 +716,11 @@ def sample_run_aggregation_query(): def begin_transaction( self, - request: Union[datastore.BeginTransactionRequest, dict] = None, + request: Optional[Union[datastore.BeginTransactionRequest, dict]] = None, *, - project_id: str = None, + project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. 
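
The ``cast`` added above exists purely for the type checker: once the dict and ``None`` branches have run, only a ``ClientOptions`` instance can remain, but that cannot be inferred across the reassignments. A standalone sketch of the narrowing pattern (the helper name is invented):

    from typing import Optional, Union, cast

    from google.api_core import client_options as client_options_lib

    def coerce_client_options(
        opts: Optional[Union[client_options_lib.ClientOptions, dict]],
    ) -> client_options_lib.ClientOptions:
        # Mirror of the constructor's normalization: dict -> ClientOptions,
        # None -> default options.
        if isinstance(opts, dict):
            opts = client_options_lib.from_dict(opts)
        if opts is None:
            opts = client_options_lib.ClientOptions()
        # The branches guarantee a ClientOptions here; cast() records that
        # fact for the type checker.
        return cast(client_options_lib.ClientOptions, opts)
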
@@ -808,14 +820,14 @@ def sample_begin_transaction(): def commit( self, - request: Union[datastore.CommitRequest, dict] = None, + request: Optional[Union[datastore.CommitRequest, dict]] = None, *, - project_id: str = None, - mode: datastore.CommitRequest.Mode = None, - transaction: bytes = None, - mutations: Sequence[datastore.Mutation] = None, + project_id: Optional[str] = None, + mode: Optional[datastore.CommitRequest.Mode] = None, + transaction: Optional[bytes] = None, + mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting @@ -875,7 +887,7 @@ def sample_commit(): This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - mutations (Sequence[google.cloud.datastore_v1.types.Mutation]): + mutations (MutableSequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a @@ -958,12 +970,12 @@ def sample_commit(): def rollback( self, - request: Union[datastore.RollbackRequest, dict] = None, + request: Optional[Union[datastore.RollbackRequest, dict]] = None, *, - project_id: str = None, - transaction: bytes = None, + project_id: Optional[str] = None, + transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -1074,12 +1086,12 @@ def sample_rollback(): def allocate_ids( self, - request: Union[datastore.AllocateIdsRequest, dict] = None, + request: Optional[Union[datastore.AllocateIdsRequest, dict]] = None, *, - project_id: str = None, - keys: Sequence[entity.Key] = None, + project_id: Optional[str] = None, + keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for @@ -1122,7 +1134,7 @@ def sample_allocate_ids(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be @@ -1191,12 +1203,12 @@ def sample_allocate_ids(): def reserve_ids( self, - request: Union[datastore.ReserveIdsRequest, dict] = None, + request: Optional[Union[datastore.ReserveIdsRequest, dict]] = None, *, - project_id: str = None, - keys: Sequence[entity.Key] = None, + project_id: Optional[str] = None, + keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being @@ -1239,7 +1251,7 @@ def sample_reserve_ids(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key paths whose numeric IDs should not be auto-allocated. @@ -1320,10 +1332,10 @@ def __exit__(self, type, value, traceback): def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1374,10 +1386,10 @@ def list_operations( def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1428,10 +1440,10 @@ def get_operation( def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1483,10 +1495,10 @@ def delete_operation( def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index d628468bb158..c9b14ea8e551 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -52,7 +52,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 4f160c453307..622f217cea98 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -54,14 +54,14 @@ def __init__( self, *, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -188,8 +188,8 @@ def __init__( def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 529c22062084..08b1e5f27b78 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -56,7 +56,7 @@ class DatastoreGrpcAsyncIOTransport(DatastoreTransport): def create_channel( cls, host: str = "datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -99,15 +99,15 @@ def __init__( self, *, host: str = 
"datastore.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index 17020a63ccd7..c2d1d8599acc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.datastore_v1.types import entity @@ -37,7 +39,7 @@ class AggregationResult(proto.Message): fields present for each result. Attributes: - aggregate_properties (Mapping[str, google.cloud.datastore_v1.types.Value]): + aggregate_properties (MutableMapping[str, google.cloud.datastore_v1.types.Value]): The result of the aggregation functions, ex: ``COUNT(*) AS total_entities``. @@ -48,7 +50,7 @@ class AggregationResult(proto.Message): the query. """ - aggregate_properties = proto.MapField( + aggregate_properties: MutableMapping[str, entity.Value] = proto.MapField( proto.STRING, proto.MESSAGE, number=2, @@ -61,7 +63,7 @@ class AggregationResultBatch(proto.Message): query. Attributes: - aggregation_results (Sequence[google.cloud.datastore_v1.types.AggregationResult]): + aggregation_results (MutableSequence[google.cloud.datastore_v1.types.AggregationResult]): The aggregation results for this batch. more_results (google.cloud.datastore_v1.types.QueryResultBatch.MoreResultsType): The state of the query after the current batch. Only @@ -76,17 +78,17 @@ class AggregationResultBatch(proto.Message): for all preceding batches. 
""" - aggregation_results = proto.RepeatedField( + aggregation_results: MutableSequence["AggregationResult"] = proto.RepeatedField( proto.MESSAGE, number=1, message="AggregationResult", ) - more_results = proto.Field( + more_results: query.QueryResultBatch.MoreResultsType = proto.Field( proto.ENUM, number=2, enum=query.QueryResultBatch.MoreResultsType, ) - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 42c87f72c0a3..a2d7c2a2aea3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.datastore_v1.types import aggregation_result @@ -63,24 +65,24 @@ class LookupRequest(proto.Message): string '' to refer the default database. read_options (google.cloud.datastore_v1.types.ReadOptions): The options for this lookup request. - keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. Keys of entities to look up. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - read_options = proto.Field( + read_options: "ReadOptions" = proto.Field( proto.MESSAGE, number=1, message="ReadOptions", ) - keys = proto.RepeatedField( + keys: MutableSequence[entity.Key] = proto.RepeatedField( proto.MESSAGE, number=3, message=entity.Key, @@ -92,15 +94,15 @@ class LookupResponse(proto.Message): [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. Attributes: - found (Sequence[google.cloud.datastore_v1.types.EntityResult]): + found (MutableSequence[google.cloud.datastore_v1.types.EntityResult]): Entities found as ``ResultType.FULL`` entities. The order of results in this field is undefined and has no relation to the order of the keys in the input. - missing (Sequence[google.cloud.datastore_v1.types.EntityResult]): + missing (MutableSequence[google.cloud.datastore_v1.types.EntityResult]): Entities not found as ``ResultType.KEY_ONLY`` entities. The order of results in this field is undefined and has no relation to the order of the keys in the input. - deferred (Sequence[google.cloud.datastore_v1.types.Key]): + deferred (MutableSequence[google.cloud.datastore_v1.types.Key]): A list of keys that were not looked up due to resource constraints. The order of results in this field is undefined and has no relation to @@ -110,22 +112,22 @@ class LookupResponse(proto.Message): found missing. 
""" - found = proto.RepeatedField( + found: MutableSequence[gd_query.EntityResult] = proto.RepeatedField( proto.MESSAGE, number=1, message=gd_query.EntityResult, ) - missing = proto.RepeatedField( + missing: MutableSequence[gd_query.EntityResult] = proto.RepeatedField( proto.MESSAGE, number=2, message=gd_query.EntityResult, ) - deferred = proto.RepeatedField( + deferred: MutableSequence[entity.Key] = proto.RepeatedField( proto.MESSAGE, number=3, message=entity.Key, ) - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp, @@ -171,31 +173,31 @@ class RunQueryRequest(proto.Message): This field is a member of `oneof`_ ``query_type``. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - partition_id = proto.Field( + partition_id: entity.PartitionId = proto.Field( proto.MESSAGE, number=2, message=entity.PartitionId, ) - read_options = proto.Field( + read_options: "ReadOptions" = proto.Field( proto.MESSAGE, number=1, message="ReadOptions", ) - query = proto.Field( + query: gd_query.Query = proto.Field( proto.MESSAGE, number=3, oneof="query_type", message=gd_query.Query, ) - gql_query = proto.Field( + gql_query: gd_query.GqlQuery = proto.Field( proto.MESSAGE, number=7, oneof="query_type", @@ -215,12 +217,12 @@ class RunQueryResponse(proto.Message): was set. """ - batch = proto.Field( + batch: gd_query.QueryResultBatch = proto.Field( proto.MESSAGE, number=1, message=gd_query.QueryResultBatch, ) - query = proto.Field( + query: gd_query.Query = proto.Field( proto.MESSAGE, number=2, message=gd_query.Query, @@ -266,31 +268,31 @@ class RunAggregationQueryRequest(proto.Message): This field is a member of `oneof`_ ``query_type``. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - partition_id = proto.Field( + partition_id: entity.PartitionId = proto.Field( proto.MESSAGE, number=2, message=entity.PartitionId, ) - read_options = proto.Field( + read_options: "ReadOptions" = proto.Field( proto.MESSAGE, number=1, message="ReadOptions", ) - aggregation_query = proto.Field( + aggregation_query: gd_query.AggregationQuery = proto.Field( proto.MESSAGE, number=3, oneof="query_type", message=gd_query.AggregationQuery, ) - gql_query = proto.Field( + gql_query: gd_query.GqlQuery = proto.Field( proto.MESSAGE, number=7, oneof="query_type", @@ -311,12 +313,12 @@ class RunAggregationQueryResponse(proto.Message): was set. """ - batch = proto.Field( + batch: aggregation_result.AggregationResultBatch = proto.Field( proto.MESSAGE, number=1, message=aggregation_result.AggregationResultBatch, ) - query = proto.Field( + query: gd_query.AggregationQuery = proto.Field( proto.MESSAGE, number=2, message=gd_query.AggregationQuery, @@ -340,15 +342,15 @@ class BeginTransactionRequest(proto.Message): Options for a new transaction. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - transaction_options = proto.Field( + transaction_options: "TransactionOptions" = proto.Field( proto.MESSAGE, number=10, message="TransactionOptions", @@ -364,7 +366,7 @@ class BeginTransactionResponse(proto.Message): The transaction identifier (always present). 
""" - transaction = proto.Field( + transaction: bytes = proto.Field( proto.BYTES, number=1, ) @@ -388,15 +390,15 @@ class RollbackRequest(proto.Message): [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - transaction = proto.Field( + transaction: bytes = proto.Field( proto.BYTES, number=1, ) @@ -435,7 +437,7 @@ class CommitRequest(proto.Message): [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. This field is a member of `oneof`_ ``transaction_selector``. - mutations (Sequence[google.cloud.datastore_v1.types.Mutation]): + mutations (MutableSequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a single @@ -458,25 +460,25 @@ class Mode(proto.Enum): TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - mode = proto.Field( + mode: Mode = proto.Field( proto.ENUM, number=5, enum=Mode, ) - transaction = proto.Field( + transaction: bytes = proto.Field( proto.BYTES, number=1, oneof="transaction_selector", ) - mutations = proto.RepeatedField( + mutations: MutableSequence["Mutation"] = proto.RepeatedField( proto.MESSAGE, number=6, message="Mutation", @@ -488,7 +490,7 @@ class CommitResponse(proto.Message): [Datastore.Commit][google.datastore.v1.Datastore.Commit]. Attributes: - mutation_results (Sequence[google.cloud.datastore_v1.types.MutationResult]): + mutation_results (MutableSequence[google.cloud.datastore_v1.types.MutationResult]): The result of performing the mutations. The i-th mutation result corresponds to the i-th mutation in the request. @@ -500,16 +502,16 @@ class CommitResponse(proto.Message): non-transactional commits. """ - mutation_results = proto.RepeatedField( + mutation_results: MutableSequence["MutationResult"] = proto.RepeatedField( proto.MESSAGE, number=3, message="MutationResult", ) - index_updates = proto.Field( + index_updates: int = proto.Field( proto.INT32, number=4, ) - commit_time = proto.Field( + commit_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp, @@ -529,21 +531,21 @@ class AllocateIdsRequest(proto.Message): the request. '(default)' is not allowed; please use empty string '' to refer the default database. - keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - keys = proto.RepeatedField( + keys: MutableSequence[entity.Key] = proto.RepeatedField( proto.MESSAGE, number=1, message=entity.Key, @@ -555,13 +557,13 @@ class AllocateIdsResponse(proto.Message): [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. Attributes: - keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): The keys specified in the request (in the same order), each with its key path completed with a newly allocated ID. 
""" - keys = proto.RepeatedField( + keys: MutableSequence[entity.Key] = proto.RepeatedField( proto.MESSAGE, number=1, message=entity.Key, @@ -581,21 +583,21 @@ class ReserveIdsRequest(proto.Message): the request. '(default)' is not allowed; please use empty string '' to refer the default database. - keys (Sequence[google.cloud.datastore_v1.types.Key]): + keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key paths whose numeric IDs should not be auto-allocated. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=8, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=9, ) - keys = proto.RepeatedField( + keys: MutableSequence[entity.Key] = proto.RepeatedField( proto.MESSAGE, number=1, message=entity.Key, @@ -660,36 +662,36 @@ class Mutation(proto.Message): This field is a member of `oneof`_ ``conflict_detection_strategy``. """ - insert = proto.Field( + insert: entity.Entity = proto.Field( proto.MESSAGE, number=4, oneof="operation", message=entity.Entity, ) - update = proto.Field( + update: entity.Entity = proto.Field( proto.MESSAGE, number=5, oneof="operation", message=entity.Entity, ) - upsert = proto.Field( + upsert: entity.Entity = proto.Field( proto.MESSAGE, number=6, oneof="operation", message=entity.Entity, ) - delete = proto.Field( + delete: entity.Key = proto.Field( proto.MESSAGE, number=7, oneof="operation", message=entity.Key, ) - base_version = proto.Field( + base_version: int = proto.Field( proto.INT64, number=8, oneof="conflict_detection_strategy", ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, oneof="conflict_detection_strategy", @@ -726,21 +728,21 @@ class MutationResult(proto.Message): strategy field is not set in the mutation. """ - key = proto.Field( + key: entity.Key = proto.Field( proto.MESSAGE, number=3, message=entity.Key, ) - version = proto.Field( + version: int = proto.Field( proto.INT64, number=4, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - conflict_detected = proto.Field( + conflict_detected: bool = proto.Field( proto.BOOL, number=5, ) @@ -783,18 +785,18 @@ class ReadConsistency(proto.Enum): STRONG = 1 EVENTUAL = 2 - read_consistency = proto.Field( + read_consistency: ReadConsistency = proto.Field( proto.ENUM, number=1, oneof="consistency_type", enum=ReadConsistency, ) - transaction = proto.Field( + transaction: bytes = proto.Field( proto.BYTES, number=2, oneof="consistency_type", ) - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, oneof="consistency_type", @@ -839,7 +841,7 @@ class ReadWrite(proto.Message): being retried. """ - previous_transaction = proto.Field( + previous_transaction: bytes = proto.Field( proto.BYTES, number=1, ) @@ -853,19 +855,19 @@ class ReadOnly(proto.Message): This may not be older than 60 seconds. 
""" - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - read_write = proto.Field( + read_write: ReadWrite = proto.Field( proto.MESSAGE, number=1, oneof="mode", message=ReadWrite, ) - read_only = proto.Field( + read_only: ReadOnly = proto.Field( proto.MESSAGE, number=2, oneof="mode", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index d9b8febe6d95..adb651a2ce22 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import struct_pb2 # type: ignore @@ -65,15 +67,15 @@ class PartitionId(proto.Message): which the entities belong. """ - project_id = proto.Field( + project_id: str = proto.Field( proto.STRING, number=2, ) - database_id = proto.Field( + database_id: str = proto.Field( proto.STRING, number=3, ) - namespace_id = proto.Field( + namespace_id: str = proto.Field( proto.STRING, number=4, ) @@ -92,7 +94,7 @@ class Key(proto.Message): currently identified by a project ID and namespace ID. Queries are scoped to a single partition. - path (Sequence[google.cloud.datastore_v1.types.Key.PathElement]): + path (MutableSequence[google.cloud.datastore_v1.types.Key.PathElement]): The entity path. An entity path consists of one or more elements composed of a kind and a string or numerical identifier, which identify entities. The first element @@ -158,27 +160,27 @@ class PathElement(proto.Message): This field is a member of `oneof`_ ``id_type``. """ - kind = proto.Field( + kind: str = proto.Field( proto.STRING, number=1, ) - id = proto.Field( + id: int = proto.Field( proto.INT64, number=2, oneof="id_type", ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, oneof="id_type", ) - partition_id = proto.Field( + partition_id: "PartitionId" = proto.Field( proto.MESSAGE, number=1, message="PartitionId", ) - path = proto.RepeatedField( + path: MutableSequence[PathElement] = proto.RepeatedField( proto.MESSAGE, number=2, message=PathElement, @@ -189,13 +191,13 @@ class ArrayValue(proto.Message): r"""An array value. Attributes: - values (Sequence[google.cloud.datastore_v1.types.Value]): + values (MutableSequence[google.cloud.datastore_v1.types.Value]): Values in the array. The order of values in an array is preserved as long as all values have identical settings for 'exclude_from_indexes'. """ - values = proto.RepeatedField( + values: MutableSequence["Value"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Value", @@ -279,72 +281,72 @@ class Value(proto.Message): indexes including those defined explicitly. 
""" - null_value = proto.Field( + null_value: struct_pb2.NullValue = proto.Field( proto.ENUM, number=11, oneof="value_type", enum=struct_pb2.NullValue, ) - boolean_value = proto.Field( + boolean_value: bool = proto.Field( proto.BOOL, number=1, oneof="value_type", ) - integer_value = proto.Field( + integer_value: int = proto.Field( proto.INT64, number=2, oneof="value_type", ) - double_value = proto.Field( + double_value: float = proto.Field( proto.DOUBLE, number=3, oneof="value_type", ) - timestamp_value = proto.Field( + timestamp_value: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=10, oneof="value_type", message=timestamp_pb2.Timestamp, ) - key_value = proto.Field( + key_value: "Key" = proto.Field( proto.MESSAGE, number=5, oneof="value_type", message="Key", ) - string_value = proto.Field( + string_value: str = proto.Field( proto.STRING, number=17, oneof="value_type", ) - blob_value = proto.Field( + blob_value: bytes = proto.Field( proto.BYTES, number=18, oneof="value_type", ) - geo_point_value = proto.Field( + geo_point_value: latlng_pb2.LatLng = proto.Field( proto.MESSAGE, number=8, oneof="value_type", message=latlng_pb2.LatLng, ) - entity_value = proto.Field( + entity_value: "Entity" = proto.Field( proto.MESSAGE, number=6, oneof="value_type", message="Entity", ) - array_value = proto.Field( + array_value: "ArrayValue" = proto.Field( proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", ) - meaning = proto.Field( + meaning: int = proto.Field( proto.INT32, number=14, ) - exclude_from_indexes = proto.Field( + exclude_from_indexes: bool = proto.Field( proto.BOOL, number=19, ) @@ -365,7 +367,7 @@ class Entity(proto.Message): example, an entity in ``Value.entity_value`` may have no key). An entity's kind is its key path's last element's kind, or null if it has no key. - properties (Mapping[str, google.cloud.datastore_v1.types.Value]): + properties (MutableMapping[str, google.cloud.datastore_v1.types.Value]): The entity's properties. The map's keys are property names. A property name matching regex ``__.*__`` is reserved. A reserved property name is forbidden in certain documented @@ -373,12 +375,12 @@ class Entity(proto.Message): characters. The name cannot be ``""``. """ - key = proto.Field( + key: "Key" = proto.Field( proto.MESSAGE, number=1, message="Key", ) - properties = proto.MapField( + properties: MutableMapping[str, "Value"] = proto.MapField( proto.STRING, proto.MESSAGE, number=3, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index a3f6b75d46a6..f66da3f2c1de 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.datastore_v1.types import entity as gd_entity @@ -83,21 +85,21 @@ class ResultType(proto.Enum): PROJECTION = 2 KEY_ONLY = 3 - entity = proto.Field( + entity: gd_entity.Entity = proto.Field( proto.MESSAGE, number=1, message=gd_entity.Entity, ) - version = proto.Field( + version: int = proto.Field( proto.INT64, number=4, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - cursor = proto.Field( + cursor: bytes = proto.Field( proto.BYTES, number=3, ) @@ -107,19 +109,19 @@ class Query(proto.Message): r"""A query for entities. Attributes: - projection (Sequence[google.cloud.datastore_v1.types.Projection]): + projection (MutableSequence[google.cloud.datastore_v1.types.Projection]): The projection to return. Defaults to returning all properties. - kind (Sequence[google.cloud.datastore_v1.types.KindExpression]): + kind (MutableSequence[google.cloud.datastore_v1.types.KindExpression]): The kinds to query (if empty, returns entities of all kinds). Currently at most 1 kind may be specified. filter (google.cloud.datastore_v1.types.Filter): The filter to apply. - order (Sequence[google.cloud.datastore_v1.types.PropertyOrder]): + order (MutableSequence[google.cloud.datastore_v1.types.PropertyOrder]): The order to apply to the query results (if empty, order is unspecified). - distinct_on (Sequence[google.cloud.datastore_v1.types.PropertyReference]): + distinct_on (MutableSequence[google.cloud.datastore_v1.types.PropertyReference]): The properties to make distinct. The query results will contain the first result for each distinct combination of values for the given @@ -145,44 +147,44 @@ class Query(proto.Message): Must be >= 0 if specified. """ - projection = proto.RepeatedField( + projection: MutableSequence["Projection"] = proto.RepeatedField( proto.MESSAGE, number=2, message="Projection", ) - kind = proto.RepeatedField( + kind: MutableSequence["KindExpression"] = proto.RepeatedField( proto.MESSAGE, number=3, message="KindExpression", ) - filter = proto.Field( + filter: "Filter" = proto.Field( proto.MESSAGE, number=4, message="Filter", ) - order = proto.RepeatedField( + order: MutableSequence["PropertyOrder"] = proto.RepeatedField( proto.MESSAGE, number=5, message="PropertyOrder", ) - distinct_on = proto.RepeatedField( + distinct_on: MutableSequence["PropertyReference"] = proto.RepeatedField( proto.MESSAGE, number=6, message="PropertyReference", ) - start_cursor = proto.Field( + start_cursor: bytes = proto.Field( proto.BYTES, number=7, ) - end_cursor = proto.Field( + end_cursor: bytes = proto.Field( proto.BYTES, number=8, ) - offset = proto.Field( + offset: int = proto.Field( proto.INT32, number=10, ) - limit = proto.Field( + limit: wrappers_pb2.Int32Value = proto.Field( proto.MESSAGE, number=12, message=wrappers_pb2.Int32Value, @@ -201,7 +203,7 @@ class AggregationQuery(proto.Message): Nested query for aggregation This field is a member of `oneof`_ ``query_type``. - aggregations (Sequence[google.cloud.datastore_v1.types.AggregationQuery.Aggregation]): + aggregations (MutableSequence[google.cloud.datastore_v1.types.AggregationQuery.Aggregation]): Optional. Series of aggregations to apply over the results of the ``nested_query``. @@ -290,30 +292,30 @@ class Count(proto.Message): - Must be non-negative when present. 
""" - up_to = proto.Field( + up_to: wrappers_pb2.Int64Value = proto.Field( proto.MESSAGE, number=1, message=wrappers_pb2.Int64Value, ) - count = proto.Field( + count: "AggregationQuery.Aggregation.Count" = proto.Field( proto.MESSAGE, number=1, oneof="operator", message="AggregationQuery.Aggregation.Count", ) - alias = proto.Field( + alias: str = proto.Field( proto.STRING, number=7, ) - nested_query = proto.Field( + nested_query: "Query" = proto.Field( proto.MESSAGE, number=1, oneof="query_type", message="Query", ) - aggregations = proto.RepeatedField( + aggregations: MutableSequence[Aggregation] = proto.RepeatedField( proto.MESSAGE, number=3, message=Aggregation, @@ -328,7 +330,7 @@ class KindExpression(proto.Message): The name of the kind. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -344,7 +346,7 @@ class PropertyReference(proto.Message): a property name path. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=2, ) @@ -358,7 +360,7 @@ class Projection(proto.Message): The property to project. """ - property = proto.Field( + property: "PropertyReference" = proto.Field( proto.MESSAGE, number=1, message="PropertyReference", @@ -381,12 +383,12 @@ class Direction(proto.Enum): ASCENDING = 1 DESCENDING = 2 - property = proto.Field( + property: "PropertyReference" = proto.Field( proto.MESSAGE, number=1, message="PropertyReference", ) - direction = proto.Field( + direction: Direction = proto.Field( proto.ENUM, number=2, enum=Direction, @@ -414,13 +416,13 @@ class Filter(proto.Message): This field is a member of `oneof`_ ``filter_type``. """ - composite_filter = proto.Field( + composite_filter: "CompositeFilter" = proto.Field( proto.MESSAGE, number=1, oneof="filter_type", message="CompositeFilter", ) - property_filter = proto.Field( + property_filter: "PropertyFilter" = proto.Field( proto.MESSAGE, number=2, oneof="filter_type", @@ -435,7 +437,7 @@ class CompositeFilter(proto.Message): Attributes: op (google.cloud.datastore_v1.types.CompositeFilter.Operator): The operator for combining multiple filters. - filters (Sequence[google.cloud.datastore_v1.types.Filter]): + filters (MutableSequence[google.cloud.datastore_v1.types.Filter]): The list of filters to combine. Requires: @@ -448,12 +450,12 @@ class Operator(proto.Enum): OPERATOR_UNSPECIFIED = 0 AND = 1 - op = proto.Field( + op: Operator = proto.Field( proto.ENUM, number=1, enum=Operator, ) - filters = proto.RepeatedField( + filters: MutableSequence["Filter"] = proto.RepeatedField( proto.MESSAGE, number=2, message="Filter", @@ -485,17 +487,17 @@ class Operator(proto.Enum): HAS_ANCESTOR = 11 NOT_IN = 13 - property = proto.Field( + property: "PropertyReference" = proto.Field( proto.MESSAGE, number=1, message="PropertyReference", ) - op = proto.Field( + op: Operator = proto.Field( proto.ENUM, number=2, enum=Operator, ) - value = proto.Field( + value: gd_entity.Value = proto.Field( proto.MESSAGE, number=3, message=gd_entity.Value, @@ -515,14 +517,14 @@ class GqlQuery(proto.Message): and instead must bind all values. For example, ``SELECT * FROM Kind WHERE a = 'string literal'`` is not allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. - named_bindings (Mapping[str, google.cloud.datastore_v1.types.GqlQueryParameter]): + named_bindings (MutableMapping[str, google.cloud.datastore_v1.types.GqlQueryParameter]): For each non-reserved named binding site in the query string, there must be a named parameter with that name, but not necessarily the inverse. 
Key must match regex ``[A-Za-z_$][A-Za-z_$0-9]*``, must not match regex ``__.*__``, and must not be ``""``. - positional_bindings (Sequence[google.cloud.datastore_v1.types.GqlQueryParameter]): + positional_bindings (MutableSequence[google.cloud.datastore_v1.types.GqlQueryParameter]): Numbered binding site @1 references the first numbered parameter, effectively using 1-based indexing, rather than the usual 0. @@ -532,21 +534,21 @@ class GqlQuery(proto.Message): true. """ - query_string = proto.Field( + query_string: str = proto.Field( proto.STRING, number=1, ) - allow_literals = proto.Field( + allow_literals: bool = proto.Field( proto.BOOL, number=2, ) - named_bindings = proto.MapField( + named_bindings: MutableMapping[str, "GqlQueryParameter"] = proto.MapField( proto.STRING, proto.MESSAGE, number=5, message="GqlQueryParameter", ) - positional_bindings = proto.RepeatedField( + positional_bindings: MutableSequence["GqlQueryParameter"] = proto.RepeatedField( proto.MESSAGE, number=4, message="GqlQueryParameter", @@ -575,13 +577,13 @@ class GqlQueryParameter(proto.Message): This field is a member of `oneof`_ ``parameter_type``. """ - value = proto.Field( + value: gd_entity.Value = proto.Field( proto.MESSAGE, number=2, oneof="parameter_type", message=gd_entity.Value, ) - cursor = proto.Field( + cursor: bytes = proto.Field( proto.BYTES, number=3, oneof="parameter_type", @@ -600,7 +602,7 @@ class QueryResultBatch(proto.Message): result. Will be set when ``skipped_results`` != 0. entity_result_type (google.cloud.datastore_v1.types.EntityResult.ResultType): The result type for every entity in ``entity_results``. - entity_results (Sequence[google.cloud.datastore_v1.types.EntityResult]): + entity_results (MutableSequence[google.cloud.datastore_v1.types.EntityResult]): The results for this batch. 
end_cursor (bytes): A cursor that points to the position after @@ -641,38 +643,38 @@ class MoreResultsType(proto.Enum): MORE_RESULTS_AFTER_CURSOR = 4 NO_MORE_RESULTS = 3 - skipped_results = proto.Field( + skipped_results: int = proto.Field( proto.INT32, number=6, ) - skipped_cursor = proto.Field( + skipped_cursor: bytes = proto.Field( proto.BYTES, number=3, ) - entity_result_type = proto.Field( + entity_result_type: "EntityResult.ResultType" = proto.Field( proto.ENUM, number=1, enum="EntityResult.ResultType", ) - entity_results = proto.RepeatedField( + entity_results: MutableSequence["EntityResult"] = proto.RepeatedField( proto.MESSAGE, number=2, message="EntityResult", ) - end_cursor = proto.Field( + end_cursor: bytes = proto.Field( proto.BYTES, number=4, ) - more_results = proto.Field( + more_results: MoreResultsType = proto.Field( proto.ENUM, number=5, enum=MoreResultsType, ) - snapshot_version = proto.Field( + snapshot_version: int = proto.Field( proto.INT64, number=7, ) - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp, diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 47450e9e363a..60fc41725300 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -86,6 +86,7 @@ def get_staging_dirs( "", ) + s.move(library / f"google/cloud/datastore_admin", excludes=["**/gapic_version.py"]) s.move(library / f"google/cloud/datastore_admin_{library.name}") s.move(library / "tests") s.move(library / "scripts") @@ -102,7 +103,7 @@ def get_staging_dirs( ) s.move( templated_files, - excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS"], + excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS", ".github/release-please.yml"], ) python.py_samples(skip_readmes=True) diff --git a/packages/google-cloud-datastore/release-please-config.json b/packages/google-cloud-datastore/release-please-config.json new file mode 100644 index 000000000000..ebf3058fae33 --- /dev/null +++ b/packages/google-cloud-datastore/release-please-config.json @@ -0,0 +1,20 @@ +{ + "$schema": +"https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/datastore/gapic_version.py", + "google/cloud/datastore_admin/gapic_version.py" + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "2.10.0" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index b0e45e9edd08..af39cfa434db 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -42,7 +42,7 @@ package_root = os.path.abspath(os.path.dirname(__file__)) version = {} -with open(os.path.join(package_root, "google/cloud/datastore/version.py")) as fp: +with open(os.path.join(package_root, "google/cloud/datastore/gapic_version.py")) as fp: exec(fp.read(), version) version = version["__version__"] diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 13c26fa9ae11..63f8814f6887 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ 
b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -53,6 +53,7 @@ from google.cloud.datastore_admin_v1.types import index from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore import google.auth From 2ab6400fbc53eec8df6c4a25a9186e22c15b362c Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Wed, 30 Nov 2022 08:34:28 -0800 Subject: [PATCH 471/611] feat: Support "limit" in count query. (#384) * Move the limit to aggregation_query.fetch * Add test coverage --- .../google/cloud/datastore/aggregation.py | 9 +++++- .../tests/system/test_aggregation_query.py | 20 ++++++++++++ .../tests/unit/test_aggregation.py | 31 ++++++++++++++++--- 3 files changed, 54 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py index bb75d94ec09b..24d2abcc6552 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py @@ -174,6 +174,7 @@ def add_aggregations(self, aggregations): def fetch( self, client=None, + limit=None, eventual=False, retry=None, timeout=None, @@ -204,7 +205,7 @@ def fetch( >>> client.put_multi([andy, sally, bobby]) >>> query = client.query(kind='Andy') >>> aggregation_query = client.aggregation_query(query) - >>> result = aggregation_query.count(alias="total").fetch() + >>> result = aggregation_query.count(alias="total").fetch(limit=5) >>> result @@ -248,6 +249,7 @@ def fetch( return AggregationResultIterator( self, client, + limit=limit, eventual=eventual, retry=retry, timeout=timeout, @@ -293,6 +295,7 @@ def __init__( self, aggregation_query, client, + limit=None, eventual=False, retry=None, timeout=None, @@ -308,6 +311,7 @@ def __init__( self._retry = retry self._timeout = timeout self._read_time = read_time + self._limit = limit # The attributes below will change over the life of the iterator. self._more_results = True @@ -322,6 +326,9 @@ def _build_protobuf(self): state of the iterator. 
""" pb = self._aggregation_query._to_pb() + if self._limit is not None and self._limit > 0: + for aggregation in pb.aggregations: + aggregation.count.up_to = self._limit return pb def _process_query_results(self, response_pb): diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py index 3e5120da9bdf..b912e96b763e 100644 --- a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py +++ b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py @@ -93,6 +93,26 @@ def test_aggregation_query_with_alias(aggregation_query_client, nested_query): assert r.value > 0 +def test_aggregation_query_with_limit(aggregation_query_client, nested_query): + query = nested_query + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total") + result = _do_fetch(aggregation_query) # count without limit + assert len(result) == 1 + for r in result[0]: + assert r.alias == "total" + assert r.value == 8 + + aggregation_query = aggregation_query_client.aggregation_query(query) + aggregation_query.count(alias="total_up_to") + result = _do_fetch(aggregation_query, limit=2) # count with limit = 2 + assert len(result) == 1 + for r in result[0]: + assert r.alias == "total_up_to" + assert r.value == 2 + + def test_aggregation_query_multiple_aggregations( aggregation_query_client, nested_query ): diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py index 8b28a908facc..afa9dc536d62 100644 --- a/packages/google-cloud-datastore/tests/unit/test_aggregation.py +++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py @@ -127,6 +127,22 @@ def test_query_fetch_w_explicit_client_w_retry_w_timeout(client): assert iterator._timeout == timeout +def test_query_fetch_w_explicit_client_w_limit(client): + from google.cloud.datastore.aggregation import AggregationResultIterator + + other_client = _make_client() + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + limit = 2 + + iterator = aggregation_query.fetch(client=other_client, limit=limit) + + assert isinstance(iterator, AggregationResultIterator) + assert iterator._aggregation_query is aggregation_query + assert iterator.client is other_client + assert iterator._limit == limit + + def test_iterator_constructor_defaults(): query = object() client = object() @@ -149,12 +165,10 @@ def test_iterator_constructor_explicit(): aggregation_query = AggregationQuery(client=client, query=query) retry = mock.Mock() timeout = 100000 + limit = 2 iterator = _make_aggregation_iterator( - aggregation_query, - client, - retry=retry, - timeout=timeout, + aggregation_query, client, retry=retry, timeout=timeout, limit=limit ) assert not iterator._started @@ -165,6 +179,7 @@ def test_iterator_constructor_explicit(): assert iterator._more_results assert iterator._retry == retry assert iterator._timeout == timeout + assert iterator._limit == limit def test_iterator__build_protobuf_empty(): @@ -186,14 +201,20 @@ def test_iterator__build_protobuf_all_values(): client = _Client(None) query = _make_query(client) + alias = "total" + limit = 2 aggregation_query = AggregationQuery(client=client, query=query) + aggregation_query.count(alias) - iterator = _make_aggregation_iterator(aggregation_query, client) + iterator = _make_aggregation_iterator(aggregation_query, client, limit=limit) 
iterator.num_results = 4 pb = iterator._build_protobuf() expected_pb = query_pb2.AggregationQuery() expected_pb.nested_query = query_pb2.Query() + expected_count_pb = query_pb2.AggregationQuery.Aggregation(alias=alias) + expected_count_pb.count.up_to = limit + expected_pb.aggregations.append(expected_count_pb) assert pb == expected_pb From 7c85860977ba79779fe33b5b8a3a7b20cde88082 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Nov 2022 16:49:30 -0800 Subject: [PATCH 472/611] chore(main): release 2.11.0 (#388) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index a2cc302edb7b..4de8919570a2 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.10.0" + ".": "2.11.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index d4e6bd77eea3..3c74fb0cffd6 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.11.0](https://github.com/googleapis/python-datastore/compare/v2.10.0...v2.11.0) (2022-11-30) + + +### Features + +* Support "limit" in count query. ([#384](https://github.com/googleapis/python-datastore/issues/384)) ([a4b666a](https://github.com/googleapis/python-datastore/commit/a4b666a4a11b04903cf7a48f74e525205d13250e)) + ## [2.10.0](https://github.com/googleapis/python-datastore/compare/v2.9.0...v2.10.0) (2022-11-01) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 60f4adfc31c5..27ffac0f4e84 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.10.0" # {x-release-please-version} +__version__ = "2.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 13e710fccca2..e6e357434ce4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.10.0" +__version__ = "2.11.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 00f0a8d0705e..bb74f811a5c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.10.0" # {x-release-please-version} +__version__ = "2.11.0" # {x-release-please-version} From 171c7b88949d8c084214ea908d88f0ee1f40a826 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Dec 2022 03:35:12 +0100 Subject: [PATCH 473/611] chore(deps): update dependency google-cloud-datastore to v2.11.0 (#389) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index bc05d8eea4a4..85f7f8ec2d44 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.10.0 \ No newline at end of file +google-cloud-datastore==2.11.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 3d3dda713fcb..42fae9cb6069 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.10.0 +google-cloud-datastore==2.11.0 From 829502a4cf4a1ecfb735018643bf1107e9952eb7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Dec 2022 13:53:23 -0500 Subject: [PATCH 474/611] fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#390) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(deps): Require google-api-core >=1.34.0, >=2.11.0 fix: Drop usage of pkg_resources fix: Fix timeout default values docs(samples): Snippetgen should call await on the operation coroutine before calling result PiperOrigin-RevId: 493260409 Source-Link: https://github.com/googleapis/googleapis/commit/fea43879f83a8d0dacc9353b3f75f8f46d37162f Source-Link: https://github.com/googleapis/googleapis-gen/commit/387b7344c7529ee44be84e613b19a820508c612b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): require google-api-core>=1.34.0,>=2.11.0 * fix cover * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert * set coverage level to 99 * set coverage level to 100 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add test Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/.coveragerc | 5 --- .../cloud/datastore_admin_v1/__init__.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 15 ++++++++ .../services/datastore_admin/async_client.py | 34 ++++++++--------- .../services/datastore_admin/client.py | 26 ++++++------- .../datastore_admin/transports/base.py | 14 +++---- .../google/cloud/datastore_v1/__init__.py | 2 +- .../cloud/datastore_v1/gapic_version.py | 15 ++++++++ .../services/datastore/async_client.py | 38 +++++++++---------- .../datastore_v1/services/datastore/client.py | 38 +++++++++---------- .../services/datastore/transports/base.py | 14 +++---- packages/google-cloud-datastore/owlbot.py | 4 +- packages/google-cloud-datastore/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../tests/unit/test__gapic.py | 11 ++++++ 15 files changed, 117 insertions(+), 105 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py diff --git a/packages/google-cloud-datastore/.coveragerc b/packages/google-cloud-datastore/.coveragerc index cab8fd728a5f..fc99a17ba24a 100644 --- a/packages/google-cloud-datastore/.coveragerc +++ b/packages/google-cloud-datastore/.coveragerc @@ -30,8 +30,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index 4b0552ded8f5..d2b8b534c66b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.datastore_admin import gapic_version as package_version +from google.cloud.datastore_admin_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py new file mode 100644 index 000000000000..3aec944280b1 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -0,0 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "2.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index bd96febe7643..ad1ced594952 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -27,7 +27,8 @@ Type, Union, ) -import pkg_resources + +from google.cloud.datastore_admin_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -281,7 +282,7 @@ async def export_entities( entity_filter: Optional[datastore_admin.EntityFilter] = None, output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Exports a copy of all or a subset of entities from @@ -321,7 +322,7 @@ async def sample_export_entities(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -462,7 +463,7 @@ async def import_entities( input_url: Optional[str] = None, entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Imports entities into Google Cloud Datastore. @@ -499,7 +500,7 @@ async def sample_import_entities(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -637,7 +638,7 @@ async def create_index( request: Optional[Union[datastore_admin.CreateIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates the specified index. A newly created index's initial @@ -680,7 +681,7 @@ async def sample_create_index(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -747,7 +748,7 @@ async def delete_index( request: Optional[Union[datastore_admin.DeleteIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes an existing index. 
An index can only be deleted if it is @@ -789,7 +790,7 @@ async def sample_delete_index(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -859,7 +860,7 @@ async def get_index( request: Optional[Union[datastore_admin.GetIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> index.Index: r"""Gets an index. @@ -951,7 +952,7 @@ async def list_indexes( request: Optional[Union[datastore_admin.ListIndexesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListIndexesAsyncPager: r"""Lists the indexes that match the specified filters. @@ -1060,14 +1061,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-datastore-admin", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("DatastoreAdminAsyncClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 4c00fb94b52e..e1ab3a5909fe 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -28,7 +28,8 @@ Union, cast, ) -import pkg_resources + +from google.cloud.datastore_admin_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -485,7 +486,7 @@ def export_entities( entity_filter: Optional[datastore_admin.EntityFilter] = None, output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Exports a copy of all or a subset of entities from @@ -665,7 +666,7 @@ def import_entities( input_url: Optional[str] = None, entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Imports entities into Google Cloud Datastore. @@ -839,7 +840,7 @@ def create_index( request: Optional[Union[datastore_admin.CreateIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates the specified index. 
A newly created index's initial @@ -950,7 +951,7 @@ def delete_index( request: Optional[Union[datastore_admin.DeleteIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes an existing index. An index can only be deleted if it is @@ -1063,7 +1064,7 @@ def get_index( request: Optional[Union[datastore_admin.GetIndexRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> index.Index: r"""Gets an index. @@ -1146,7 +1147,7 @@ def list_indexes( request: Optional[Union[datastore_admin.ListIndexesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListIndexesPager: r"""Lists the indexes that match the specified filters. @@ -1253,14 +1254,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-datastore-admin", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("DatastoreAdminClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 23175bd2e4f2..72391f4ba4d1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.datastore_admin_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.cloud.datastore_admin_v1.types import index from google.longrunning import operations_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-datastore-admin", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class DatastoreAdminTransport(abc.ABC): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index b868c4063564..05159e6877c8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.datastore import gapic_version as package_version +from google.cloud.datastore_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py new file mode 100644 index 000000000000..3aec944280b1 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -0,0 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "2.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 1986c2b5ef6c..ece12c50d6d3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -27,7 +27,8 @@ Type, Union, ) -import pkg_resources + +from google.cloud.datastore_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -223,7 +224,7 @@ async def lookup( read_options: Optional[datastore.ReadOptions] = None, keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -352,7 +353,7 @@ async def run_query( request: Optional[Union[datastore.RunQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -444,7 +445,7 @@ async def run_aggregation_query( request: Optional[Union[datastore.RunAggregationQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -537,7 +538,7 @@ async def begin_transaction( *, project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. 
@@ -644,7 +645,7 @@ async def commit( transaction: Optional[bytes] = None, mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting @@ -792,7 +793,7 @@ async def rollback( project_id: Optional[str] = None, transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -908,7 +909,7 @@ async def allocate_ids( project_id: Optional[str] = None, keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for @@ -1025,7 +1026,7 @@ async def reserve_ids( project_id: Optional[str] = None, keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being @@ -1149,7 +1150,7 @@ async def list_operations( request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1203,7 +1204,7 @@ async def get_operation( request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1257,7 +1258,7 @@ async def delete_operation( request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1312,7 +1313,7 @@ async def cancel_operation( request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
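Note on the timeout hunks in this commit: they replace an Optional[float] = None default with the gapic_v1.method.DEFAULT sentinel. A sentinel default is distinguishable from every value a caller could pass, so "argument omitted" no longer collides with "caller explicitly passed None". A minimal sketch of the pattern, using illustrative names rather than the actual gapic internals:

    _DEFAULT = object()  # module-level sentinel; never equal to None or any float

    def lookup(keys, timeout=_DEFAULT):
        if timeout is _DEFAULT:
            timeout = 60.0  # fall back to the method's configured default
        # An explicit timeout=None can now mean "no deadline" instead of
        # being silently coerced into the default.
        return "lookup of %d keys with timeout=%r" % (len(keys), timeout)

    print(lookup(["k1"]))                # uses the 60.0 default
    print(lookup(["k1"], timeout=None))  # the explicit None survives

This is also why the annotation widens to Union[float, object]: the sentinel is a plain object() and would not satisfy Optional[float].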
@@ -1368,14 +1369,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-datastore", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("DatastoreAsyncClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 5d7446208f89..8693049c47d3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -28,7 +28,8 @@ Union, cast, ) -import pkg_resources + +from google.cloud.datastore_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -434,7 +435,7 @@ def lookup( read_options: Optional[datastore.ReadOptions] = None, keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -553,7 +554,7 @@ def run_query( request: Optional[Union[datastore.RunQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -636,7 +637,7 @@ def run_aggregation_query( request: Optional[Union[datastore.RunAggregationQueryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -720,7 +721,7 @@ def begin_transaction( *, project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. @@ -827,7 +828,7 @@ def commit( transaction: Optional[bytes] = None, mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting @@ -975,7 +976,7 @@ def rollback( project_id: Optional[str] = None, transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. 
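Note on the DEFAULT_CLIENT_INFO hunk above (and its twins in the sync client and transport below): it trades a runtime pkg_resources lookup for a constant imported from the new gapic_version module. A minimal sketch of the failure mode the old code guarded against, next to its replacement; the fallback value here is illustrative:

    # Old: resolve the installed distribution's metadata at import time.
    # pkg_resources raises DistributionNotFound when no installation
    # metadata exists, e.g. when running from a bare source checkout,
    # hence the try/except wrapper in the removed code.
    import pkg_resources
    try:
        version = pkg_resources.get_distribution("google-cloud-datastore").version
    except pkg_resources.DistributionNotFound:
        version = ""  # unversioned client info as a fallback

    # New: a constant stamped into the source tree by release-please,
    # readable whether or not installation metadata is present.
    from google.cloud.datastore_v1 import gapic_version
    version = gapic_version.__version__

Dropping the lookup also removes the import-time dependency on pkg_resources, a setuptools API that has since been deprecated.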
@@ -1091,7 +1092,7 @@ def allocate_ids( project_id: Optional[str] = None, keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for @@ -1208,7 +1209,7 @@ def reserve_ids( project_id: Optional[str] = None, keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being @@ -1335,7 +1336,7 @@ def list_operations( request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1389,7 +1390,7 @@ def get_operation( request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1443,7 +1444,7 @@ def delete_operation( request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1498,7 +1499,7 @@ def cancel_operation( request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
@@ -1548,14 +1549,9 @@ def cancel_operation( ) -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-datastore", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("DatastoreClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index c9b14ea8e551..1e7a3c9416bb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.datastore_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,9 @@ from google.cloud.datastore_v1.types import datastore from google.longrunning import operations_pb2 -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-datastore", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class DatastoreTransport(abc.ABC): diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 60fc41725300..d50402316b9a 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -68,7 +68,7 @@ def get_staging_dirs( datastore_admin_default_version = "v1" for library in get_staging_dirs(datastore_default_version, "datastore"): - s.move(library / f"google/cloud/datastore_{library.name}") + s.move(library / f"google/cloud/datastore_{library.name}", excludes=["**/gapic_version.py"]) s.move(library / "tests/") s.move(library / "scripts") @@ -87,7 +87,7 @@ def get_staging_dirs( ) s.move(library / f"google/cloud/datastore_admin", excludes=["**/gapic_version.py"]) - s.move(library / f"google/cloud/datastore_admin_{library.name}") + s.move(library / f"google/cloud/datastore_admin_{library.name}", excludes=["**/gapic_version.py"]) s.move(library / "tests") s.move(library / "scripts") diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index af39cfa434db..a64557cb6f80 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index 
5ee7f9eed70a..4b49551c56a0 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.32.0 +google-api-core==1.34.0 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 diff --git a/packages/google-cloud-datastore/tests/unit/test__gapic.py b/packages/google-cloud-datastore/tests/unit/test__gapic.py index b72a68b56d39..b515aba8786e 100644 --- a/packages/google-cloud-datastore/tests/unit/test__gapic.py +++ b/packages/google-cloud-datastore/tests/unit/test__gapic.py @@ -92,3 +92,14 @@ def test_emulator(make_chan, mock_transport, mock_klass): mock_klass.assert_called_once_with( transport=mock.sentinel.transport, client_info=mock.sentinel.client_info ) + + +def test_version_from_gapic_version_matches_datastore_version(): + from google.cloud.datastore import gapic_version + from google.cloud.datastore_v1 import gapic_version as gapic_version_v1 + from google.cloud.datastore_admin import gapic_version as gapic_version_admin + from google.cloud.datastore_admin_v1 import gapic_version as gapic_version_admin_v1 + + assert gapic_version.__version__ == gapic_version_admin.__version__ + assert gapic_version.__version__ == gapic_version_v1.__version__ + assert gapic_version.__version__ == gapic_version_admin_v1.__version__ From 7373d49465cb1e47b7b1e2c885ca9bac483b27b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 14:34:31 -0500 Subject: [PATCH 475/611] build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro (#393) Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-datastore/.pre-commit-config.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index bb21147e4c23..fccaa8e84449 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 9c1b9be34e6b..05dc4672edaa 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml index 46d237160f6d..5405cc8ff1f3 100644 --- a/packages/google-cloud-datastore/.pre-commit-config.yaml +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From b7302287aae7165e408839612c56dea37452e51a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 9 Dec 2022 13:55:24 -0500 Subject: [PATCH 476/611] chore: update release-please-config.json (#392) Co-authored-by: Mariatta Wijaya --- packages/google-cloud-datastore/release-please-config.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/release-please-config.json b/packages/google-cloud-datastore/release-please-config.json index ebf3058fae33..24afe2b1772a 100644 --- a/packages/google-cloud-datastore/release-please-config.json +++ b/packages/google-cloud-datastore/release-please-config.json @@ -6,7 +6,9 @@ "release-type": "python", "extra-files": [ "google/cloud/datastore/gapic_version.py", - "google/cloud/datastore_admin/gapic_version.py" + "google/cloud/datastore_admin/gapic_version.py", + "google/cloud/datastore_admin_v1/gapic_version.py", + "google/cloud/datastore_v1/gapic_version.py" ] } }, From 1220fc47c38bb39c8aadcd3bec1d43435cf00441 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Fri, 9 Dec 2022 11:49:58 -0800 Subject: [PATCH 477/611] samples: Add snippets and samples for Count query (#383) * Add samples for Count query * Remove unused variable. * Add count query samples with limit * Fix the stale read test. 
* Raise ValueError instead of general Exception --- .../samples/snippets/snippets.py | 164 +++++++++++++++++- .../samples/snippets/snippets_test.py | 50 +++++- 2 files changed, 210 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets.py b/packages/google-cloud-datastore/samples/snippets/snippets.py index b37020c066b7..78e72bca66d3 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets.py @@ -14,6 +14,7 @@ import argparse from datetime import datetime, timedelta, timezone from pprint import pprint +import time from google.cloud import datastore # noqa: I100 @@ -63,7 +64,7 @@ def query_with_readtime(client): read_time = datetime.now(timezone.utc) - timedelta(seconds=15) # Fetch an entity with read_time - task_key = client.key('Task', 'sampletask') + task_key = client.key("Task", "sampletask") entity = client.get(task_key, read_time=read_time) # Query Task entities with read_time @@ -77,11 +78,170 @@ def query_with_readtime(client): return results +def count_query_in_transaction(client): + # [START datastore_count_in_transaction] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + + task1["owner"] = "john" + task2["owner"] = "john" + + tasks = [task1, task2] + client.put_multi(tasks) + + with client.transaction() as transaction: + + tasks_of_john = client.query(kind="Task") + tasks_of_john.add_filter("owner", "=", "john") + total_tasks_query = client.aggregation_query(tasks_of_john) + + query_result = total_tasks_query.count(alias="tasks_count").fetch() + for task_result in query_result: + tasks_count = task_result[0] + if tasks_count.value < 2: + task3 = datastore.Entity(client.key("Task", "task3")) + task3["owner"] = "john" + transaction.put(task3) + tasks.append(task3) + else: + print(f"Found existing {tasks_count.value} tasks, rolling back") + client.entities_to_delete.extend(tasks) + raise ValueError("User 'John' cannot have more than 2 tasks") + # [END datastore_count_in_transaction] + + +def count_query_on_kind(client): + # [START datastore_count_on_kind] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + + tasks = [task1, task2] + client.put_multi(tasks) + all_tasks_query = client.query(kind="Task") + all_tasks_count_query = client.aggregation_query(all_tasks_query).count() + query_result = all_tasks_count_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"Total tasks (accessible from default alias) is {aggregation.value}") + # [END datastore_count_on_kind] + return tasks + + +def count_query_with_limit(client): + # [START datastore_count_with_limit] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + task3 = datastore.Entity(client.key("Task", "task3")) + + tasks = [task1, task2, task3] + client.put_multi(tasks) + all_tasks_query = client.query(kind="Task") + all_tasks_count_query = client.aggregation_query(all_tasks_query).count() + query_result = all_tasks_count_query.fetch(limit=2) + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"We have at least {aggregation.value} tasks") + # [END datastore_count_with_limit] + return tasks + + +def count_query_property_filter(client): + # [START datastore_count_with_property_filter] + task1 = datastore.Entity(client.key("Task", 
"task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + task3 = datastore.Entity(client.key("Task", "task3")) + + task1["done"] = True + task2["done"] = False + task3["done"] = True + + tasks = [task1, task2, task3] + client.put_multi(tasks) + completed_tasks = client.query(kind="Task").add_filter("done", "=", True) + remaining_tasks = client.query(kind="Task").add_filter("done", "=", False) + + completed_tasks_query = client.aggregation_query(query=completed_tasks).count( + alias="total_completed_count" + ) + remaining_tasks_query = client.aggregation_query(query=remaining_tasks).count( + alias="total_remaining_count" + ) + + completed_query_result = completed_tasks_query.fetch() + for aggregation_results in completed_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_completed_count": + print(f"Total completed tasks count is {aggregation_result.value}") + + remaining_query_result = remaining_tasks_query.fetch() + for aggregation_results in remaining_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_remaining_count": + print(f"Total remaining tasks count is {aggregation_result.value}") + # [END datastore_count_with_property_filter] + return tasks + + +def count_query_with_stale_read(client): + + tasks = [task for task in client.query(kind="Task").fetch()] + client.delete_multi(tasks) # ensure the database is empty before starting + + # [START datastore_count_query_with_stale_read] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + + # Saving two tasks + task1["done"] = True + task2["done"] = False + client.put_multi([task1, task2]) + time.sleep(10) + + past_timestamp = datetime.now( + timezone.utc + ) # we have two tasks in database at this time. 
+ time.sleep(10) + + # Saving third task + task3 = datastore.Entity(client.key("Task", "task3")) + task3["done"] = False + client.put(task3) + + all_tasks = client.query(kind="Task") + all_tasks_count = client.aggregation_query( + query=all_tasks, + ).count(alias="all_tasks_count") + + # Executing aggregation query + query_result = all_tasks_count.fetch() + for aggregation_results in query_result: + for aggregation_result in aggregation_results: + print(f"Latest tasks count is {aggregation_result.value}") + + # Executing aggregation query with past timestamp + tasks_in_past = client.aggregation_query(query=all_tasks).count( + alias="tasks_in_past" + ) + tasks_in_the_past_query_result = tasks_in_past.fetch(read_time=past_timestamp) + for aggregation_results in tasks_in_the_past_query_result: + for aggregation_result in aggregation_results: + print(f"Stale tasks count is {aggregation_result.value}") + # [END datastore_count_query_with_stale_read] + return [task1, task2, task3] + + def main(project_id): client = datastore.Client(project_id) for name, function in globals().items(): - if name in ("main", "_preamble", "defaultdict", "datetime", "timezone", "timedelta") or not callable(function): + if name in ( + "main", + "_preamble", + "defaultdict", + "datetime", + "timezone", + "timedelta", + ) or not callable(function): continue print(name) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets_test.py b/packages/google-cloud-datastore/samples/snippets/snippets_test.py index 58e75a599b8b..18bc701ec288 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets_test.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets_test.py @@ -15,8 +15,6 @@ import backoff from google.cloud import datastore - - import pytest import snippets @@ -72,3 +70,51 @@ def test_query_with_readtime(self, client): tasks = snippets.query_with_readtime(client) client.entities_to_delete.extend(tasks) assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_in_transaction(self, client): + with pytest.raises(ValueError) as excinfo: + snippets.count_query_in_transaction(client) + assert "User 'John' cannot have more than 2 tasks" in str(excinfo.value) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_on_kind(self, capsys, client): + tasks = snippets.count_query_on_kind(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total tasks (accessible from default alias) is 2" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_with_limit(self, capsys, client): + tasks = snippets.count_query_with_limit(client) + captured = capsys.readouterr() + assert captured.out.strip() == "We have at least 2 tasks" + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_property_filter(self, capsys, client): + tasks = snippets.count_query_property_filter(client) + captured = capsys.readouterr() + + assert "Total completed tasks count is 2" in captured.out + assert "Total remaining tasks count is 1" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_with_stale_read(self, capsys, client): + tasks = 
snippets.count_query_with_stale_read(client) + captured = capsys.readouterr() + + assert "Latest tasks count is 3" in captured.out + assert "Stale tasks count is 2" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) From e8cddfebf093b1d64dd1f88855d7f19cf2c10151 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Jan 2023 13:22:42 -0800 Subject: [PATCH 478/611] chore(main): release 2.11.1 (#391) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 14 ++++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 20 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index 4de8919570a2..7c0e7e00ec87 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.11.0" + ".": "2.11.1" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 3c74fb0cffd6..38f0e7715c85 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.11.1](https://github.com/googleapis/python-datastore/compare/v2.11.0...v2.11.1) (2022-12-09) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([6f6bb63](https://github.com/googleapis/python-datastore/commit/6f6bb63219ac4369d16d39d5ec7b22bffe59c48f)) +* Drop usage of pkg_resources ([6f6bb63](https://github.com/googleapis/python-datastore/commit/6f6bb63219ac4369d16d39d5ec7b22bffe59c48f)) +* Fix timeout default values ([6f6bb63](https://github.com/googleapis/python-datastore/commit/6f6bb63219ac4369d16d39d5ec7b22bffe59c48f)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([6f6bb63](https://github.com/googleapis/python-datastore/commit/6f6bb63219ac4369d16d39d5ec7b22bffe59c48f)) + ## [2.11.0](https://github.com/googleapis/python-datastore/compare/v2.10.0...v2.11.0) (2022-11-30) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 27ffac0f4e84..dda7a86e4042 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index e6e357434ce4..4ddd9c795e39 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.11.0" +__version__ = "2.11.1" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index bb74f811a5c9..9ac1d4a82044 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 3aec944280b1..8771a58903ad 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 3aec944280b1..8771a58903ad 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.11.1" # {x-release-please-version} From bd9eaccf08a86ad2fcb8b83f59586022a09fe8e3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 4 Jan 2023 23:56:38 +0100 Subject: [PATCH 479/611] chore(deps): update dependency google-cloud-datastore to v2.11.1 (#394) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 85f7f8ec2d44..51f08b253bc7 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.11.0 \ No newline at end of file +google-cloud-datastore==2.11.1 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 42fae9cb6069..756096cfb351 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.11.0 +google-cloud-datastore==2.11.1 From e9729c42f95ff395ad4a899f3a741a5041933c5d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 12:21:39 -0500 Subject: [PATCH 480/611] chore(python): add support for python 3.11 (#395) Source-Link: https://github.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.11/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.11/continuous.cfg | 6 +++ .../samples/python3.11/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.11/periodic.cfg | 6 +++ .../.kokoro/samples/python3.11/presubmit.cfg | 6 +++ .../google-cloud-datastore/CONTRIBUTING.rst | 6 ++- packages/google-cloud-datastore/noxfile.py | 2 +- .../samples/snippets/noxfile.py | 2 +- .../snippets/schedule-export/noxfile.py | 2 +- 11 files changed, 79 insertions(+), 8 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index fccaa8e84449..889f77dfa25d 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index 23000c05d9d8..8057a7691b12 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 000000000000..e4b302a64a47 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 000000000000..714045a75ed7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 5018565cf6d0..bcd67e5af249 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.10 -- -k + $ nox -s unit-3.11 -- -k .. note:: @@ -250,11 +250,13 @@ We support: - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ +- `Python 3.11`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ Supported versions can be found in our ``noxfile.py`` `config`_. 
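Note: the CONTRIBUTING.rst hunk above documents the new interpreter, and the noxfile diffs that follow add "3.11" to the version lists that actually generate the test sessions. A minimal sketch of how such a list drives nox, with an illustrative session body:

    import nox

    UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]

    @nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
    def unit(session):
        # One virtualenv-backed session per interpreter, e.g. unit-3.11.
        session.install("pytest")
        session.install("-e", ".")
        session.run("pytest", "tests/unit")

With the matrix extended, the "nox -s unit-3.11 -- -k <test_name>" invocation shown in CONTRIBUTING.rst above selects just the new interpreter's session.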
diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 9f36cbeb61a9..94e34d85e639 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -31,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index f5c32b22789b..7c8a63994cbd 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py index f5c32b22789b..7c8a63994cbd 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From dfefbf731bad4e5e5c0497f29ab3ed6c64b5d155 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 23:31:43 -0500 Subject: [PATCH 481/611] feat: Add support for python 3.11 (#396) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * require proto-plus 1.22.2 for python 3.11 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/datastore_admin_v1/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/async_client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/client.py | 2 +- .../google/cloud/datastore_v1/__init__.py | 2 +- .../cloud/datastore_v1/services/datastore/async_client.py | 2 +- .../google/cloud/datastore_v1/services/datastore/client.py | 2 +- packages/google-cloud-datastore/setup.py | 2 ++ 7 files changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index d2b8b534c66b..4b0552ded8f5 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.datastore_admin_v1 import gapic_version as package_version +from google.cloud.datastore_admin import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index ad1ced594952..72617b7408ca 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -193,7 +193,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index e1ab3a5909fe..11b158cc2418 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -326,7 +326,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 05159e6877c8..b868c4063564 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.datastore_v1 import gapic_version as package_version +from google.cloud.datastore import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index ece12c50d6d3..6051b4dc7aff 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -136,7 +136,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 8693049c47d3..a0b39a73a76e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -276,7 +276,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
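Note on the setup.py hunk below: it keeps the existing proto-plus floor and layers a stricter one behind a PEP 508 environment marker, matching the commit note that proto-plus 1.22.2 is required for Python 3.11. A minimal sketch of how such a marker evaluates, assuming the third-party packaging library is installed:

    from packaging.requirements import Requirement

    req = Requirement("proto-plus>=1.22.2; python_version >= '3.11'")
    print(req.specifier)          # >=1.22.2
    print(req.marker.evaluate())  # True only when run under Python 3.11+

pip performs the same evaluation at install time, so interpreters older than 3.11 keep the original >=1.22.0 floor while 3.11 picks up the newer release.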
diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index a64557cb6f80..5447922df83c 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -32,6 +32,7 @@ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} @@ -83,6 +84,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", "Topic :: Software Development :: Libraries :: Python Modules", From 4226ac4cf41357856a0dd50330e77e30dabcfc63 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 12:18:46 -0500 Subject: [PATCH 482/611] chore(main): release 2.12.0 (#397) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index 7c0e7e00ec87..997329e9fcf5 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.11.1" + ".": "2.12.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 38f0e7715c85..8ba6d02efc99 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.12.0](https://github.com/googleapis/python-datastore/compare/v2.11.1...v2.12.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#396](https://github.com/googleapis/python-datastore/issues/396)) ([19aca56](https://github.com/googleapis/python-datastore/commit/19aca5608af28d0623fa1d43616b355b019fd18e)) + ## [2.11.1](https://github.com/googleapis/python-datastore/compare/v2.11.0...v2.11.1) (2022-12-09) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index dda7a86e4042..ea068f5dbe12 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.11.1" # {x-release-please-version} +__version__ = "2.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 4ddd9c795e39..67e043bde2a0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.11.1" +__version__ = "2.12.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 9ac1d4a82044..16ae0e953c12 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.11.1" # {x-release-please-version} +__version__ = "2.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 8771a58903ad..ebfc0a28a23f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.11.1" # {x-release-please-version} +__version__ = "2.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 8771a58903ad..ebfc0a28a23f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.11.1" # {x-release-please-version} +__version__ = "2.12.0" # {x-release-please-version} From 8dc5be0a45ec6409441419f89f92b744ba48b81c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Jan 2023 18:42:49 +0000 Subject: [PATCH 483/611] chore(deps): update dependency google-cloud-datastore to v2.12.0 (#399) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 51f08b253bc7..b60eaea51ae0 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.11.1 \ No newline at end of file +google-cloud-datastore==2.12.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 756096cfb351..a35fb2f733b1 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.11.1 +google-cloud-datastore==2.12.0 From 56971142f188aee68482039e03f813da536a8e52 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 12 Jan 2023 15:31:28 -0500 Subject: [PATCH 484/611] feat: add dynamic routing header annotation to DatastoreV1 (#400) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add dynamic routing header annotation to DatastoreV1 PiperOrigin-RevId: 501591105 Source-Link: https://github.com/googleapis/googleapis/commit/c4ebfff4f512a9cb0fc09eb6d82a2987a3524ae0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e2d6011df35c3b7b54ff39098be779c105408b1b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTJkNjAxMWRmMzVjM2I3YjU0ZmYzOTA5OGJlNzc5YzEwNTQwOGIxYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/datastore/async_client.py | 13 +- .../datastore_v1/services/datastore/client.py | 165 +++++---- .../unit/gapic/datastore_v1/test_datastore.py | 320 +++++------------- 3 files changed, 190 insertions(+), 308 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 6051b4dc7aff..84c963d05adf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -397,7 +397,7 @@ async def sample_run_query(): Returns: google.cloud.datastore_v1.types.RunQueryResponse: The response for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. """ # Create or coerce a protobuf request object. 
@@ -489,7 +489,7 @@ async def sample_run_aggregation_query(): Returns: google.cloud.datastore_v1.types.RunAggregationQueryResponse: The response for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. """ # Create or coerce a protobuf request object. @@ -589,7 +589,7 @@ async def sample_begin_transaction(): Returns: google.cloud.datastore_v1.types.BeginTransactionResponse: The response for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. """ # Create or coerce a protobuf request object. @@ -852,7 +852,8 @@ async def sample_rollback(): Returns: google.cloud.datastore_v1.types.RollbackResponse: - The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + The response for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an empty message). """ @@ -970,7 +971,7 @@ async def sample_allocate_ids(): Returns: google.cloud.datastore_v1.types.AllocateIdsResponse: The response for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. """ # Create or coerce a protobuf request object. @@ -1086,7 +1087,7 @@ async def sample_reserve_ids(): Returns: google.cloud.datastore_v1.types.ReserveIdsResponse: The response for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index a0b39a73a76e..679a750d4ddd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -530,13 +530,18 @@ def sample_lookup(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.lookup] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -598,7 +603,7 @@ def sample_run_query(): Returns: google.cloud.datastore_v1.types.RunQueryResponse: The response for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. """ # Create or coerce a protobuf request object. @@ -613,13 +618,18 @@ def sample_run_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.run_query] - # Certain fields should be provided within the metadata header; - # add these here. 
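The header_params blocks introduced above replace an unconditional project_id routing header with one built only from the request fields that are actually set. google.api_core's gapic_v1.routing_header.to_grpc_metadata then folds the dict into a single x-goog-request-params metadata entry; a rough stand-in for that behavior (the function name and return shape are illustrative, not the real transport plumbing):

from urllib.parse import urlencode

def routing_metadata(project_id: str = "", database_id: str = "") -> tuple:
    """Illustrative equivalent of the generated header_params logic above."""
    header_params = {}
    # Only non-empty request fields become routing parameters.
    if project_id:
        header_params["project_id"] = project_id
    if database_id:
        header_params["database_id"] = database_id
    if not header_params:
        return ()
    # to_grpc_metadata does roughly this URL-encoding step.
    return (("x-goog-request-params", urlencode(header_params)),)

print(routing_metadata("sample1", "my-db"))
# (('x-goog-request-params', 'project_id=sample1&database_id=my-db'),)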
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -681,7 +691,7 @@ def sample_run_aggregation_query(): Returns: google.cloud.datastore_v1.types.RunAggregationQueryResponse: The response for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. """ # Create or coerce a protobuf request object. @@ -696,13 +706,18 @@ def sample_run_aggregation_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.run_aggregation_query] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -772,7 +787,7 @@ def sample_begin_transaction(): Returns: google.cloud.datastore_v1.types.BeginTransactionResponse: The response for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. """ # Create or coerce a protobuf request object. @@ -800,13 +815,18 @@ def sample_begin_transaction(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -950,13 +970,18 @@ def sample_commit(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.commit] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1035,7 +1060,8 @@ def sample_rollback(): Returns: google.cloud.datastore_v1.types.RollbackResponse: - The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + The response for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. 
(an empty message). """ @@ -1066,13 +1092,18 @@ def sample_rollback(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.rollback] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1153,7 +1184,7 @@ def sample_allocate_ids(): Returns: google.cloud.datastore_v1.types.AllocateIdsResponse: The response for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. + [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. """ # Create or coerce a protobuf request object. @@ -1183,13 +1214,18 @@ def sample_allocate_ids(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.allocate_ids] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1269,7 +1305,7 @@ def sample_reserve_ids(): Returns: google.cloud.datastore_v1.types.ReserveIdsResponse: The response for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. """ # Create or coerce a protobuf request object. @@ -1299,13 +1335,18 @@ def sample_reserve_ids(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.reserve_ids] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 7448690a5338..1b13defcc417 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -746,16 +746,14 @@ async def test_lookup_async_from_dict(): await test_lookup_async(request_type=dict) -def test_lookup_field_headers(): +def test_lookup_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = datastore.LookupRequest() - - request.project_id = "project_id_value" + request = datastore.LookupRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -767,44 +765,26 @@ def test_lookup_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_lookup_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.LookupRequest() - - request.project_id = "project_id_value" + request = datastore.LookupRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.LookupResponse() - ) - await client.lookup(request) + call.return_value = datastore.LookupResponse() + client.lookup(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_lookup_flattened(): @@ -1024,16 +1004,14 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -def test_run_query_field_headers(): +def test_run_query_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.RunQueryRequest() - - request.project_id = "project_id_value" + request = datastore.RunQueryRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_query), "__call__") as call: @@ -1045,44 +1023,26 @@ def test_run_query_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_query_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.RunQueryRequest() - - request.project_id = "project_id_value" + request = datastore.RunQueryRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. 
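As the comments in the rewritten tests above concede, asserting only that kw["metadata"] is non-empty does not verify the routing values themselves. A stricter check is possible with the same mocks; a hypothetical helper (not part of this patch) could look like:

def assert_routing_params(kw, expected_fragment: str) -> None:
    """Hypothetical helper: check the routing header captured from a mocked stub.

    kw["metadata"] is the metadata sequence recorded by call.mock_calls[0].
    """
    params = dict(kw["metadata"]).get("x-goog-request-params", "")
    assert expected_fragment in params

# Usage after client.lookup(request) in the tests above:
#   _, _, kw = call.mock_calls[0]
#   assert_routing_params(kw, "project_id=sample1")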
with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RunQueryResponse() - ) - await client.run_query(request) + call.return_value = datastore.RunQueryResponse() + client.run_query(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] @pytest.mark.parametrize( @@ -1174,16 +1134,14 @@ async def test_run_aggregation_query_async_from_dict(): await test_run_aggregation_query_async(request_type=dict) -def test_run_aggregation_query_field_headers(): +def test_run_aggregation_query_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.RunAggregationQueryRequest() - - request.project_id = "project_id_value" + request = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1197,46 +1155,28 @@ def test_run_aggregation_query_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_aggregation_query_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.RunAggregationQueryRequest() - - request.project_id = "project_id_value" + request = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.run_aggregation_query), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RunAggregationQueryResponse() - ) - await client.run_aggregation_query(request) + call.return_value = datastore.RunAggregationQueryResponse() + client.run_aggregation_query(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] @pytest.mark.parametrize( @@ -1334,16 +1274,14 @@ async def test_begin_transaction_async_from_dict(): await test_begin_transaction_async(request_type=dict) -def test_begin_transaction_field_headers(): +def test_begin_transaction_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = datastore.BeginTransactionRequest() - - request.project_id = "project_id_value" + request = datastore.BeginTransactionRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1357,46 +1295,28 @@ def test_begin_transaction_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_begin_transaction_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.BeginTransactionRequest() - - request.project_id = "project_id_value" + request = datastore.BeginTransactionRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.begin_transaction), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.BeginTransactionResponse() - ) - await client.begin_transaction(request) + call.return_value = datastore.BeginTransactionResponse() + client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_begin_transaction_flattened(): @@ -1574,16 +1494,14 @@ async def test_commit_async_from_dict(): await test_commit_async(request_type=dict) -def test_commit_field_headers(): +def test_commit_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.CommitRequest() - - request.project_id = "project_id_value" + request = datastore.CommitRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1595,44 +1513,26 @@ def test_commit_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.CommitRequest() - - request.project_id = "project_id_value" + request = datastore.CommitRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.CommitResponse() - ) - await client.commit(request) + call.return_value = datastore.CommitResponse() + client.commit(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_commit_flattened(): @@ -1882,16 +1782,14 @@ async def test_rollback_async_from_dict(): await test_rollback_async(request_type=dict) -def test_rollback_field_headers(): +def test_rollback_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.RollbackRequest() - - request.project_id = "project_id_value" + request = datastore.RollbackRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1903,44 +1801,26 @@ def test_rollback_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.RollbackRequest() - - request.project_id = "project_id_value" + request = datastore.RollbackRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RollbackResponse() - ) - await client.rollback(request) + call.return_value = datastore.RollbackResponse() + client.rollback(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_rollback_flattened(): @@ -2118,16 +1998,14 @@ async def test_allocate_ids_async_from_dict(): await test_allocate_ids_async(request_type=dict) -def test_allocate_ids_field_headers(): +def test_allocate_ids_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = datastore.AllocateIdsRequest() - - request.project_id = "project_id_value" + request = datastore.AllocateIdsRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -2139,44 +2017,26 @@ def test_allocate_ids_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_allocate_ids_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.AllocateIdsRequest() - - request.project_id = "project_id_value" + request = datastore.AllocateIdsRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.AllocateIdsResponse() - ) - await client.allocate_ids(request) + call.return_value = datastore.AllocateIdsResponse() + client.allocate_ids(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_allocate_ids_flattened(): @@ -2374,16 +2234,14 @@ async def test_reserve_ids_async_from_dict(): await test_reserve_ids_async(request_type=dict) -def test_reserve_ids_field_headers(): +def test_reserve_ids_routing_parameters(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.ReserveIdsRequest() - - request.project_id = "project_id_value" + request = datastore.ReserveIdsRequest(**{"project_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -2395,44 +2253,26 @@ def test_reserve_ids_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_reserve_ids_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - + # This test doesn't assert anything useful. + assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datastore.ReserveIdsRequest() - - request.project_id = "project_id_value" + request = datastore.ReserveIdsRequest(**{"database_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.ReserveIdsResponse() - ) - await client.reserve_ids(request) + call.return_value = datastore.ReserveIdsResponse() + client.reserve_ids(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_reserve_ids_flattened(): From a42661cd147d786ca8ec6a2bda4ca5ed2fda56b4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 14 Jan 2023 06:39:34 -0500 Subject: [PATCH 485/611] feat: new transaction options for datastoreV1 (#402) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: new transaction options for datastoreV1 PiperOrigin-RevId: 501960571 Source-Link: https://github.com/googleapis/googleapis/commit/d4599ad2c2374feb1b508096cbd89e6e3079eb24 Source-Link: https://github.com/googleapis/googleapis-gen/commit/02ff42cf030965b9006243c697e761fdcabb8ca1 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDJmZjQyY2YwMzA5NjViOTAwNjI0M2M2OTdlNzYxZmRjYWJiOGNhMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../cloud/datastore_v1/types/datastore.py | 65 +++++++++++++++++++ .../scripts/fixup_datastore_v1_keywords.py | 2 +- .../unit/gapic/datastore_v1/test_datastore.py | 30 +++++++-- 3 files changed, 90 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index a2d7c2a2aea3..0fe81089d1f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -107,6 +107,12 @@ class LookupResponse(proto.Message): resource constraints. The order of results in this field is undefined and has no relation to the order of the keys in the input. + transaction (bytes): + The identifier of the transaction that was started as part + of this Lookup request. + + Set only when [ReadOptions.begin_transaction][] was set in + [LookupRequest.read_options][google.datastore.v1.LookupRequest.read_options]. read_time (google.protobuf.timestamp_pb2.Timestamp): The time at which these entities were read or found missing. @@ -127,6 +133,10 @@ class LookupResponse(proto.Message): number=3, message=entity.Key, ) + transaction: bytes = proto.Field( + proto.BYTES, + number=5, + ) read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=7, @@ -215,6 +225,12 @@ class RunQueryResponse(proto.Message): query (google.cloud.datastore_v1.types.Query): The parsed form of the ``GqlQuery`` from the request, if it was set. + transaction (bytes): + The identifier of the transaction that was started as part + of this RunQuery request. + + Set only when [ReadOptions.begin_transaction][] was set in + [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. 
""" batch: gd_query.QueryResultBatch = proto.Field( @@ -227,6 +243,10 @@ class RunQueryResponse(proto.Message): number=2, message=gd_query.Query, ) + transaction: bytes = proto.Field( + proto.BYTES, + number=5, + ) class RunAggregationQueryRequest(proto.Message): @@ -311,6 +331,12 @@ class RunAggregationQueryResponse(proto.Message): query (google.cloud.datastore_v1.types.AggregationQuery): The parsed form of the ``GqlQuery`` from the request, if it was set. + transaction (bytes): + The identifier of the transaction that was started as part + of this RunAggregationQuery request. + + Set only when [ReadOptions.begin_transaction][] was set in + [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. """ batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -323,6 +349,10 @@ class RunAggregationQueryResponse(proto.Message): number=2, message=gd_query.AggregationQuery, ) + transaction: bytes = proto.Field( + proto.BYTES, + number=5, + ) class BeginTransactionRequest(proto.Message): @@ -416,6 +446,10 @@ class CommitRequest(proto.Message): r"""The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -436,6 +470,15 @@ class CommitRequest(proto.Message): commit. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``transaction_selector``. + single_use_transaction (google.cloud.datastore_v1.types.TransactionOptions): + Options for beginning a new transaction for this request. + The transaction is committed when the request completes. If + specified, + [TransactionOptions.mode][google.datastore.v1.TransactionOptions.mode] + must be + [TransactionOptions.ReadWrite][google.datastore.v1.TransactionOptions.ReadWrite]. + This field is a member of `oneof`_ ``transaction_selector``. mutations (MutableSequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. @@ -478,6 +521,12 @@ class Mode(proto.Enum): number=1, oneof="transaction_selector", ) + single_use_transaction: "TransactionOptions" = proto.Field( + proto.MESSAGE, + number=10, + oneof="transaction_selector", + message="TransactionOptions", + ) mutations: MutableSequence["Mutation"] = proto.RepeatedField( proto.MESSAGE, number=6, @@ -769,6 +818,16 @@ class ReadOptions(proto.Message): transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. + new_transaction (google.cloud.datastore_v1.types.TransactionOptions): + Options for beginning a new transaction for this request. + + The new transaction identifier will be returned in the + corresponding response as either + [LookupResponse.transaction][google.datastore.v1.LookupResponse.transaction] + or + [RunQueryResponse.transaction][google.datastore.v1.RunQueryResponse.transaction]. + This field is a member of `oneof`_ ``consistency_type``. 
read_time (google.protobuf.timestamp_pb2.Timestamp): Reads entities as they were at the given @@ -796,6 +855,12 @@ class ReadConsistency(proto.Enum): number=2, oneof="consistency_type", ) + new_transaction: "TransactionOptions" = proto.Field( + proto.MESSAGE, + number=3, + oneof="consistency_type", + message="TransactionOptions", + ) read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 82a24a4f9c91..77b0527b8a12 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -41,7 +41,7 @@ class datastoreCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'allocate_ids': ('project_id', 'keys', 'database_id', ), 'begin_transaction': ('project_id', 'database_id', 'transaction_options', ), - 'commit': ('project_id', 'database_id', 'mode', 'transaction', 'mutations', ), + 'commit': ('project_id', 'database_id', 'mode', 'transaction', 'single_use_transaction', 'mutations', ), 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 1b13defcc417..4833d39b90ea 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -683,7 +683,9 @@ def test_lookup(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = datastore.LookupResponse() + call.return_value = datastore.LookupResponse( + transaction=b"transaction_blob", + ) response = client.lookup(request) # Establish that the underlying gRPC stub method was called. @@ -693,6 +695,7 @@ def test_lookup(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) + assert response.transaction == b"transaction_blob" def test_lookup_empty_call(): @@ -728,7 +731,9 @@ async def test_lookup_async( with mock.patch.object(type(client.transport.lookup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.LookupResponse() + datastore.LookupResponse( + transaction=b"transaction_blob", + ) ) response = await client.lookup(request) @@ -739,6 +744,7 @@ async def test_lookup_async( # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) + assert response.transaction == b"transaction_blob" @pytest.mark.asyncio @@ -941,7 +947,9 @@ def test_run_query(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_query), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = datastore.RunQueryResponse() + call.return_value = datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) response = client.run_query(request) # Establish that the underlying gRPC stub method was called. @@ -951,6 +959,7 @@ def test_run_query(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) + assert response.transaction == b"transaction_blob" def test_run_query_empty_call(): @@ -986,7 +995,9 @@ async def test_run_query_async( with mock.patch.object(type(client.transport.run_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RunQueryResponse() + datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) ) response = await client.run_query(request) @@ -997,6 +1008,7 @@ async def test_run_query_async( # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) + assert response.transaction == b"transaction_blob" @pytest.mark.asyncio @@ -1067,7 +1079,9 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"): type(client.transport.run_aggregation_query), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = datastore.RunAggregationQueryResponse() + call.return_value = datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) response = client.run_aggregation_query(request) # Establish that the underlying gRPC stub method was called. @@ -1077,6 +1091,7 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunAggregationQueryResponse) + assert response.transaction == b"transaction_blob" def test_run_aggregation_query_empty_call(): @@ -1116,7 +1131,9 @@ async def test_run_aggregation_query_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RunAggregationQueryResponse() + datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) ) response = await client.run_aggregation_query(request) @@ -1127,6 +1144,7 @@ async def test_run_aggregation_query_async( # Establish that the response is the type that we expect. 
assert isinstance(response, datastore.RunAggregationQueryResponse) + assert response.transaction == b"transaction_blob" @pytest.mark.asyncio From 1e8c122d8abc76f8cf2d12939859e7e6c5273f0d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 14 Jan 2023 18:14:31 +0000 Subject: [PATCH 486/611] chore(deps): update dependency pytest to v7.2.1 (#403) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index b34edda37662..c7b5651ff672 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.2.0 +pytest==7.2.1 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index 89cb815c988e..dd3c7330bb99 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.0 \ No newline at end of file +pytest==7.2.1 \ No newline at end of file From ae55bdf157d4cdf117a46f097fca3b1f967ceb64 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 17 Jan 2023 16:22:20 -0800 Subject: [PATCH 487/611] chore(main): release 2.13.0 (#401) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Mariatta Wijaya --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 8 ++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 14 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index 997329e9fcf5..c61c7bc41ea3 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.12.0" + ".": "2.13.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 8ba6d02efc99..bf9a09ded705 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.13.0](https://github.com/googleapis/python-datastore/compare/v2.12.0...v2.13.0) (2023-01-14) + + +### Features + +* Add dynamic routing header annotation to DatastoreV1 ([#400](https://github.com/googleapis/python-datastore/issues/400)) ([1043ba3](https://github.com/googleapis/python-datastore/commit/1043ba3638434657226c176f8e714f5dc476d1f5)) +* New transaction options for datastoreV1 ([#402](https://github.com/googleapis/python-datastore/issues/402)) 
([906d026](https://github.com/googleapis/python-datastore/commit/906d026920abb0e7e44b9309a7c37254159b95ee)) + ## [2.12.0](https://github.com/googleapis/python-datastore/compare/v2.11.1...v2.12.0) (2023-01-10) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index ea068f5dbe12..fa898bdeeafa 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.12.0" # {x-release-please-version} +__version__ = "2.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 67e043bde2a0..b6000e20f865 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.12.0" +__version__ = "2.13.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 16ae0e953c12..a3c9255942c5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.12.0" # {x-release-please-version} +__version__ = "2.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index ebfc0a28a23f..a6b394cc45a9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.12.0" # {x-release-please-version} +__version__ = "2.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index ebfc0a28a23f..a6b394cc45a9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.12.0" # {x-release-please-version} +__version__ = "2.13.0" # {x-release-please-version} From 908e55f21f7f60e2a7061cd8a179e5abdb177a4f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 18 Jan 2023 14:43:21 +0000 Subject: [PATCH 488/611] chore(deps): update dependency google-cloud-datastore to v2.13.0 (#405) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index b60eaea51ae0..50988b168c2f 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.12.0 \ No newline at end of file +google-cloud-datastore==2.13.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index a35fb2f733b1..f36a896f375f 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.12.0 +google-cloud-datastore==2.13.0 From 8aae4be33320c1476aeeb6bfc54144e1c1e76697 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Jan 2023 12:09:35 -0500 Subject: [PATCH 489/611] docs: Add documentation for enums (#407) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Add documentation for enums fix: Add context manager return types chore: Update gapic-generator-python to v1.8.1 PiperOrigin-RevId: 503210727 Source-Link: https://github.com/googleapis/googleapis/commit/a391fd1dac18dfdfa00c18c8404f2c3a6ff8e98e Source-Link: https://github.com/googleapis/googleapis-gen/commit/0080f830dec37c3384157082bce279e37079ea58 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/datastore_admin/client.py | 2 +- .../types/datastore_admin.py | 43 ++++++- .../cloud/datastore_admin_v1/types/index.py | 57 ++++++++- .../datastore_admin_v1/types/migration.py | 52 +++++++- .../datastore_v1/services/datastore/client.py | 2 +- .../cloud/datastore_v1/types/datastore.py | 25 +++- .../google/cloud/datastore_v1/types/query.py | 120 +++++++++++++++++- 7 files changed, 286 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 11b158cc2418..a15b1680369f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1240,7 +1240,7 @@ def sample_list_indexes(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "DatastoreAdminClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index f7e3adcf62a4..313c36dc7518 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -46,7 +46,20 @@ class OperationType(proto.Enum): - r"""Operation types.""" + r"""Operation types. + + Values: + OPERATION_TYPE_UNSPECIFIED (0): + Unspecified. + EXPORT_ENTITIES (1): + ExportEntities. + IMPORT_ENTITIES (2): + ImportEntities. + CREATE_INDEX (3): + CreateIndex. + DELETE_INDEX (4): + DeleteIndex. + """ OPERATION_TYPE_UNSPECIFIED = 0 EXPORT_ENTITIES = 1 IMPORT_ENTITIES = 2 @@ -75,7 +88,33 @@ class CommonMetadata(proto.Message): """ class State(proto.Enum): - r"""The various possible states for an ongoing Operation.""" + r"""The various possible states for an ongoing Operation. + + Values: + STATE_UNSPECIFIED (0): + Unspecified. + INITIALIZING (1): + Request is being prepared for processing. + PROCESSING (2): + Request is actively being processed. + CANCELLING (3): + Request is in the process of being cancelled + after user called + google.longrunning.Operations.CancelOperation on + the operation. + FINALIZING (4): + Request has been processed and is in its + finalization stage. + SUCCESSFUL (5): + Request has completed successfully. + FAILED (6): + Request has finished being processed, but + encountered an error. + CANCELLED (7): + Request has finished being cancelled after + user called + google.longrunning.Operations.CancelOperation. + """ STATE_UNSPECIFIED = 0 INITIALIZING = 1 PROCESSING = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index 8192d81cd7e2..799aef4d143b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -50,19 +50,72 @@ class Index(proto.Message): class AncestorMode(proto.Enum): r"""For an ordered index, specifies whether each of the entity's ancestors will be included. + + Values: + ANCESTOR_MODE_UNSPECIFIED (0): + The ancestor mode is unspecified. + NONE (1): + Do not include the entity's ancestors in the + index. + ALL_ANCESTORS (2): + Include all the entity's ancestors in the + index. """ ANCESTOR_MODE_UNSPECIFIED = 0 NONE = 1 ALL_ANCESTORS = 2 class Direction(proto.Enum): - r"""The direction determines how a property is indexed.""" + r"""The direction determines how a property is indexed. + + Values: + DIRECTION_UNSPECIFIED (0): + The direction is unspecified. + ASCENDING (1): + The property's values are indexed so as to + support sequencing in ascending order and also + query by <, >, <=, >=, and =. + DESCENDING (2): + The property's values are indexed so as to + support sequencing in descending order and also + query by <, >, <=, >=, and =. + """ DIRECTION_UNSPECIFIED = 0 ASCENDING = 1 DESCENDING = 2 class State(proto.Enum): - r"""The possible set of states of an index.""" + r"""The possible set of states of an index. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. 
+ CREATING (1): + The index is being created, and cannot be + used by queries. There is an active long-running + operation for the index. The index is updated + when writing an entity. + Some index data may exist. + READY (2): + The index is ready to be used. + The index is updated when writing an entity. + The index is fully populated from all stored + entities it applies to. + DELETING (3): + The index is being deleted, and cannot be + used by queries. There is an active long-running + operation for the index. The index is not + updated when writing an entity. Some index data + may exist. + ERROR (4): + The index was being created or deleted, but + something went wrong. The index cannot be used + by queries. There is no active long-running + operation for the index, and the most recently + finished long-running operation failed. The + index is not updated when writing an entity. + Some index data may exist. + """ STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index 95f25f4253fd..8a919b5efa44 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -30,7 +30,18 @@ class MigrationState(proto.Enum): - r"""States for a migration.""" + r"""States for a migration. + + Values: + MIGRATION_STATE_UNSPECIFIED (0): + Unspecified. + RUNNING (1): + The migration is running. + PAUSED (2): + The migration is paused. + COMPLETE (3): + The migration is complete. + """ MIGRATION_STATE_UNSPECIFIED = 0 RUNNING = 1 PAUSED = 2 @@ -38,7 +49,31 @@ class MigrationStep(proto.Enum): - r"""Steps in a migration.""" + r"""Steps in a migration. + + Values: + MIGRATION_STEP_UNSPECIFIED (0): + Unspecified. + PREPARE (6): + Pre-migration: the database is prepared for + migration. + START (1): + Start of migration. + APPLY_WRITES_SYNCHRONOUSLY (7): + Writes are applied synchronously to at least + one replica. + COPY_AND_VERIFY (2): + Data is copied to Cloud Firestore and then + verified to match the data in Cloud Datastore. + REDIRECT_EVENTUALLY_CONSISTENT_READS (3): + Eventually-consistent reads are redirected to + Cloud Firestore. + REDIRECT_STRONGLY_CONSISTENT_READS (4): + Strongly-consistent reads are redirected to + Cloud Firestore. + REDIRECT_WRITES (5): + Writes are redirected to Cloud Firestore. + """ MIGRATION_STEP_UNSPECIFIED = 0 PREPARE = 6 START = 1 @@ -96,7 +131,18 @@ class MigrationProgressEvent(proto.Message): """ class ConcurrencyMode(proto.Enum): - r"""Concurrency modes for transactions in Cloud Firestore.""" + r"""Concurrency modes for transactions in Cloud Firestore. + + Values: + CONCURRENCY_MODE_UNSPECIFIED (0): + Unspecified. + PESSIMISTIC (1): + Pessimistic concurrency. + OPTIMISTIC (2): + Optimistic concurrency. + OPTIMISTIC_WITH_ENTITY_GROUPS (3): + Optimistic concurrency with entity groups.
+ """ CONCURRENCY_MODE_UNSPECIFIED = 0 PESSIMISTIC = 1 OPTIMISTIC = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 679a750d4ddd..542a1d70ddd6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1359,7 +1359,7 @@ def sample_reserve_ids(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "DatastoreClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 0fe81089d1f2..b748dd4758f1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -498,7 +498,19 @@ class CommitRequest(proto.Message): """ class Mode(proto.Enum): - r"""The modes available for commits.""" + r"""The modes available for commits. + + Values: + MODE_UNSPECIFIED (0): + Unspecified. This value must not be used. + TRANSACTIONAL (1): + Transactional: The mutations are either all applied, or none + are applied. Learn about transactions + `here `__. + NON_TRANSACTIONAL (2): + Non-transactional: The mutations may not + apply as all or none. + """ MODE_UNSPECIFIED = 0 TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 @@ -839,7 +851,16 @@ class ReadOptions(proto.Message): """ class ReadConsistency(proto.Enum): - r"""The possible values for read consistencies.""" + r"""The possible values for read consistencies. + + Values: + READ_CONSISTENCY_UNSPECIFIED (0): + Unspecified. This value must not be used. + STRONG (1): + Strong consistency. + EVENTUAL (2): + Eventual consistency. + """ READ_CONSISTENCY_UNSPECIFIED = 0 STRONG = 1 EVENTUAL = 2 diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index f66da3f2c1de..e19c2c2c44b1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -79,6 +79,17 @@ class ResultType(proto.Enum): context (for example, in message ``QueryResultBatch``, field ``entity_result_type`` specifies a ``ResultType`` for all the values in field ``entity_results``). + + Values: + RESULT_TYPE_UNSPECIFIED (0): + Unspecified. This value is never used. + FULL (1): + The key and properties. + PROJECTION (2): + A projected subset of properties. The entity + may have no key. + KEY_ONLY (3): + Only the key. """ RESULT_TYPE_UNSPECIFIED = 0 FULL = 1 @@ -378,7 +389,16 @@ class PropertyOrder(proto.Message): """ class Direction(proto.Enum): - r"""The sort direction.""" + r"""The sort direction. + + Values: + DIRECTION_UNSPECIFIED (0): + Unspecified. This value must not be used. + ASCENDING (1): + Ascending. + DESCENDING (2): + Descending. + """ DIRECTION_UNSPECIFIED = 0 ASCENDING = 1 DESCENDING = 2 @@ -446,7 +466,15 @@ class CompositeFilter(proto.Message): """ class Operator(proto.Enum): - r"""A composite filter operator.""" + r"""A composite filter operator. + + Values: + OPERATOR_UNSPECIFIED (0): + Unspecified. This value must not be used. 
+ AND (1): + The results are required to satisfy each of + the combined filters. + """ OPERATOR_UNSPECIFIED = 0 AND = 1 @@ -475,7 +503,74 @@ class PropertyFilter(proto.Message): """ class Operator(proto.Enum): - r"""A property filter operator.""" + r"""A property filter operator. + + Values: + OPERATOR_UNSPECIFIED (0): + Unspecified. This value must not be used. + LESS_THAN (1): + The given ``property`` is less than the given ``value``. + + Requires: + + - That ``property`` comes first in ``order_by``. + LESS_THAN_OR_EQUAL (2): + The given ``property`` is less than or equal to the given + ``value``. + + Requires: + + - That ``property`` comes first in ``order_by``. + GREATER_THAN (3): + The given ``property`` is greater than the given ``value``. + + Requires: + + - That ``property`` comes first in ``order_by``. + GREATER_THAN_OR_EQUAL (4): + The given ``property`` is greater than or equal to the given + ``value``. + + Requires: + + - That ``property`` comes first in ``order_by``. + EQUAL (5): + The given ``property`` is equal to the given ``value``. + IN (6): + The given ``property`` is equal to at least one value in the + given array. + + Requires: + + - That ``value`` is a non-empty ``ArrayValue`` with at most + 10 values. + - No other ``IN`` or ``NOT_IN`` is in the same query. + NOT_EQUAL (9): + The given ``property`` is not equal to the given ``value``. + + Requires: + + - No other ``NOT_EQUAL`` or ``NOT_IN`` is in the same + query. + - That ``property`` comes first in the ``order_by``. + HAS_ANCESTOR (11): + Limit the result set to the given entity and its + descendants. + + Requires: + + - That ``value`` is an entity key. + NOT_IN (13): + The value of the ``property`` is not in the given array. + + Requires: + + - That ``value`` is a non-empty ``ArrayValue`` with at most + 10 values. + - No other ``IN``, ``NOT_IN``, ``NOT_EQUAL`` is in the same + query. + - That ``field`` comes first in the ``order_by``. + """ OPERATOR_UNSPECIFIED = 0 LESS_THAN = 1 LESS_THAN_OR_EQUAL = 2 @@ -636,7 +731,24 @@ class QueryResultBatch(proto.Message): """ class MoreResultsType(proto.Enum): - r"""The possible values for the ``more_results`` field.""" + r"""The possible values for the ``more_results`` field. + + Values: + MORE_RESULTS_TYPE_UNSPECIFIED (0): + Unspecified. This value is never used. + NOT_FINISHED (1): + There may be additional batches to fetch from + this query. + MORE_RESULTS_AFTER_LIMIT (2): + The query is finished, but there may be more + results after the limit. + MORE_RESULTS_AFTER_CURSOR (4): + The query is finished, but there may be more + results after the end cursor. + NO_MORE_RESULTS (3): + The query is finished, and there are no more + results. 
+ """ MORE_RESULTS_TYPE_UNSPECIFIED = 0 NOT_FINISHED = 1 MORE_RESULTS_AFTER_LIMIT = 2 From 09e7e154271ac51bf07c8a264d69086d52e47108 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 10:38:03 -0500 Subject: [PATCH 490/611] chore(main): release 2.13.1 (#408) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 18 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index c61c7bc41ea3..f68b4eb3e501 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.13.0" + ".": "2.13.1" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index bf9a09ded705..284efd6420c6 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.13.1](https://github.com/googleapis/python-datastore/compare/v2.13.0...v2.13.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([9cec031](https://github.com/googleapis/python-datastore/commit/9cec031e7da6c97853e13c72f2e434f924e59743)) + + +### Documentation + +* Add documentation for enums ([9cec031](https://github.com/googleapis/python-datastore/commit/9cec031e7da6c97853e13c72f2e434f924e59743)) + ## [2.13.0](https://github.com/googleapis/python-datastore/compare/v2.12.0...v2.13.0) (2023-01-14) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index fa898bdeeafa..99ebb9c01eef 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index b6000e20f865..2330d0c2cdfb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.13.0" +__version__ = "2.13.1" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index a3c9255942c5..4c0211a94a39 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index a6b394cc45a9..d1a74ca3759f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index a6b394cc45a9..d1a74ca3759f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.13.1" # {x-release-please-version} From 74d39836637016241cf9d45b3018445e8ca5983a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 23 Jan 2023 16:09:43 +0000 Subject: [PATCH 491/611] chore(deps): update dependency google-cloud-datastore to v2.13.1 (#409) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 50988b168c2f..0b516b62cede 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.13.0 \ No newline at end of file +google-cloud-datastore==2.13.1 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index f36a896f375f..b7428c142787 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.13.0 +google-cloud-datastore==2.13.1 From e135538230640d791662bcdea2962980cb096f49 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Mon, 23 Jan 2023 08:38:12 -0800 Subject: [PATCH 492/611] fix: Fix system test using utc timezone (#406) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Specify the utc timezone when using datetime.now() 
in snapshot reads test cases. --- .../tests/system/test_read_consistency.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/tests/system/test_read_consistency.py b/packages/google-cloud-datastore/tests/system/test_read_consistency.py index d65b935678a5..9435c5f7d638 100644 --- a/packages/google-cloud-datastore/tests/system/test_read_consistency.py +++ b/packages/google-cloud-datastore/tests/system/test_read_consistency.py @@ -14,7 +14,7 @@ import time -from datetime import datetime +from datetime import datetime, timezone from google.cloud import datastore @@ -40,7 +40,7 @@ def test_get_w_read_time(datastore_client, entities_to_delete): # Add some sleep to accommodate server & client clock discrepancy. time.sleep(1) - read_time = datetime.now() + read_time = datetime.now(tz=timezone.utc) time.sleep(1) entity["field"] = "new_value" @@ -73,7 +73,7 @@ def test_query_w_read_time(datastore_client, entities_to_delete): # Add some sleep to accommodate server & client clock discrepancy. time.sleep(1) - read_time = datetime.now() + read_time = datetime.now(tz=timezone.utc) time.sleep(1) entity2["field"] = "new_value" From cd8a0693bf75a1be4cc30022efffbe32cc35c245 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 12:37:38 -0800 Subject: [PATCH 493/611] chore(main): release 2.13.2 (#410) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index f68b4eb3e501..bc12e128dd66 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.13.1" + ".": "2.13.2" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 284efd6420c6..423aa9fe167f 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.13.2](https://github.com/googleapis/python-datastore/compare/v2.13.1...v2.13.2) (2023-01-23) + + +### Bug Fixes + +* Fix system test using utc timezone ([#406](https://github.com/googleapis/python-datastore/issues/406)) ([93537ef](https://github.com/googleapis/python-datastore/commit/93537ef83e9b0b9a2b367b863eb57c22862a9bde))
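The fix in patch 492 matters because a naive datetime.now() carries no tzinfo and is interpreted against the host's local clock, while server-side read timestamps are UTC; a mismatched offset silently shifts the snapshot window the test intends to read at. A minimal standard-library sketch of the difference:

from datetime import datetime, timezone

naive = datetime.now()                 # tzinfo is None; meaning depends on the host timezone
aware = datetime.now(tz=timezone.utc)  # explicit UTC, comparable with server-side timestamps

print(naive.tzinfo)  # None
print(aware.tzinfo)  # datetime.timezone.utc
# Mixing the two in a comparison fails loudly:
# naive < aware  ->  TypeError: can't compare offset-naive and offset-aware datetimes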
+ ## [2.13.1](https://github.com/googleapis/python-datastore/compare/v2.13.0...v2.13.1) (2023-01-20) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 99ebb9c01eef..7eeb26dd316b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.1" # {x-release-please-version} +__version__ = "2.13.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 2330d0c2cdfb..0cae902c855f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.1" +__version__ = "2.13.2" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 4c0211a94a39..c790ba98e3c2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.1" # {x-release-please-version} +__version__ = "2.13.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index d1a74ca3759f..6d50101819e4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.1" # {x-release-please-version} +__version__ = "2.13.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index d1a74ca3759f..6d50101819e4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.13.1" # {x-release-please-version} +__version__ = "2.13.2" # {x-release-please-version} From 76cd905a973eb763cce842d77de1fb2f0f0d17dc Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 24 Jan 2023 15:05:09 +0000 Subject: [PATCH 494/611] chore(deps): update dependency google-cloud-datastore to v2.13.2 (#411) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 0b516b62cede..21c16a12ab52 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.13.1 \ No newline at end of file +google-cloud-datastore==2.13.2 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index b7428c142787..51cac2bed779 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.13.1 +google-cloud-datastore==2.13.2 From 357ee47d73f523b1f1205a5c42922d1aff82d5a8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 11:33:48 -0500 Subject: [PATCH 495/611] chore: Update gapic-generator-python to v1.8.2 (#412) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.2 PiperOrigin-RevId: 504289125 Source-Link: https://github.com/googleapis/googleapis/commit/38a48a44a44279e9cf9f2f864b588958a2d87491 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2dc22663dbe47a972c8d8c2f8a4df013dafdcbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJkYzIyNjYzZGJlNDdhOTcyYzhkOGMyZjhhNGRmMDEzZGFmZGNiYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google/cloud/datastore_admin_v1/__init__.py | 2 +- .../google/cloud/datastore_v1/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index 4b0552ded8f5..d2b8b534c66b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.datastore_admin import gapic_version as package_version +from google.cloud.datastore_admin_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index b868c4063564..05159e6877c8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.datastore import gapic_version as package_version +from google.cloud.datastore_v1 import gapic_version as package_version __version__ = package_version.__version__ From 6a68362e457c436c0b7c3e3f502bf6bfc9e93e82 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:42:11 +0000 Subject: [PATCH 496/611] chore: fix prerelease_deps nox session [autoapprove] (#413) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- .../.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 889f77dfa25d..f0f3b24b20cd 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 94e34d85e639..84ae80a43893 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -200,9 +200,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -375,9 +375,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -407,8 +405,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 31e3d8d29c75f84db960e3b50c5fb0e77b41c131 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:20:11 +0000 Subject: [PATCH 497/611] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#417) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/requirements.txt | 49 +++++++++---------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index f0f3b24b20cd..894fb6bc9b47 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 05dc4672edaa..096e4800a9ac 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - 
--hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From 01441c4738fbc08f70815b62ed5081f8efc85299 Mon Sep 17 00:00:00 2001 From: kolea2 <45548808+kolea2@users.noreply.github.com> Date: Tue, 14 Feb 2023 15:39:08 -0500 Subject: [PATCH 498/611] chore: update CODEOWNERS (#420) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update CODEOWNERS * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/CODEOWNERS | 8 ++++---- packages/google-cloud-datastore/.repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS index cac512401802..eb5a61d0ffd2 100644 --- a/packages/google-cloud-datastore/.github/CODEOWNERS +++ b/packages/google-cloud-datastore/.github/CODEOWNERS @@ -5,8 +5,8 @@ # 
https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes +# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk -# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes +# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index 21be1c4496a0..e6645432a60d 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -11,7 +11,7 @@ "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com", "default_version": "v1", - "codeowner_team": "@googleapis/cloud-native-db-dpes", + "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk", "api_shortname": "datastore", "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries." } From 2ba01a3059eba9c99596a957d149ba8efa2ac5ab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 12:11:53 -0500 Subject: [PATCH 499/611] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#421) Source-Link: https://github.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-datastore/.kokoro/requirements.in | 2 +- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 894fb6bc9b47..5fc5daa31783 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.in b/packages/google-cloud-datastore/.kokoro/requirements.in index cbd7e77f44db..882178ce6001 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.in +++ b/packages/google-cloud-datastore/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 096e4800a9ac..fa99c12908f0 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From ab42f9e9593eb9f34dd5e22df4ee90c84540c8b5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 28 Feb 2023 06:35:20 -0500 Subject: [PATCH 500/611] feat: enable "rest" transport in Python for services supporting numeric enums (#419) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add `OR` query support docs: Minor documentation formatting and cleanup PiperOrigin-RevId: 509848726 Source-Link: https://github.com/googleapis/googleapis/commit/b9351b4bf998332ed831fb82e9d9b02d9b3fa21f Source-Link: https://github.com/googleapis/googleapis-gen/commit/99317db6827185e8cae8eeb46137cd81cd2d6a54 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTkzMTdkYjY4MjcxODVlOGNhZThlZWI0NjEzN2NkODFjZDJkNmE1NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Add service_yaml_parameters to py_gapic_library BUILD.bazel targets PiperOrigin-RevId: 510187992 Source-Link: https://github.com/googleapis/googleapis/commit/5edc23561778df80d5293f20132765f8757a6b2c Source-Link: https://github.com/googleapis/googleapis-gen/commit/b0bedb72e4765a3e0b674a28c50ea0f9a9b26a89 
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjBiZWRiNzJlNDc2NWEzZTBiNjc0YTI4YzUwZWEwZjlhOWIyNmE4OSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.8.5 PiperOrigin-RevId: 511892190 Source-Link: https://github.com/googleapis/googleapis/commit/a45d9c09c1287ffdf938f4e8083e791046c0b23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/1907294b1d8365ea24f8c5f2e059a64124c4ed3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTkwNzI5NGIxZDgzNjVlYTI0ZjhjNWYyZTA1OWE2NDEyNGM0ZWQzYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Mariatta Wijaya Co-authored-by: Anthonios Partheniou --- .../datastore_admin_v1/gapic_metadata.json | 35 + .../services/datastore_admin/async_client.py | 218 ++ .../services/datastore_admin/client.py | 220 ++ .../datastore_admin/transports/__init__.py | 5 + .../datastore_admin/transports/base.py | 34 + .../datastore_admin/transports/grpc.py | 71 + .../transports/grpc_asyncio.py | 71 + .../datastore_admin/transports/rest.py | 1394 ++++++++ .../types/datastore_admin.py | 2 + .../cloud/datastore_admin_v1/types/index.py | 2 + .../datastore_admin_v1/types/migration.py | 2 + .../cloud/datastore_v1/gapic_metadata.json | 45 + .../datastore_v1/services/datastore/client.py | 2 + .../services/datastore/transports/__init__.py | 5 + .../services/datastore/transports/rest.py | 1632 +++++++++ .../datastore_v1/types/aggregation_result.py | 2 + .../cloud/datastore_v1/types/datastore.py | 24 +- .../google/cloud/datastore_v1/types/entity.py | 2 + .../google/cloud/datastore_v1/types/query.py | 18 + .../test_datastore_admin.py | 3043 ++++++++++++++--- .../unit/gapic/datastore_v1/test_datastore.py | 2660 +++++++++++++- 21 files changed, 8855 insertions(+), 632 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json index 8df5d4747bfe..450debcd023b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_metadata.json @@ -76,6 +76,41 @@ ] } } + }, + "rest": { + "libraryClient": "DatastoreAdminClient", + "rpcs": { + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportEntities": { + "methods": [ + "export_entities" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportEntities": { + "methods": [ + "import_entities" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + } + } } } } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 72617b7408ca..d940a0b078f6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ 
b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -47,6 +47,7 @@ from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport @@ -1054,6 +1055,223 @@ async def sample_list_indexes(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self): return self diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index a15b1680369f..f160915dfb10 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -51,10 +51,12 @@ from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreAdminGrpcTransport from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport +from .transports.rest import DatastoreAdminRestTransport class DatastoreAdminClientMeta(type): @@ -70,6 +72,7 @@ class DatastoreAdminClientMeta(type): ) # type: Dict[str, Type[DatastoreAdminTransport]] _transport_registry["grpc"] = DatastoreAdminGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport + _transport_registry["rest"] = DatastoreAdminRestTransport def get_transport_class( cls, @@ -1253,6 +1256,223 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index 7d7ea9d4ddfd..e31fda239e8d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -19,15 +19,20 @@ from .base import DatastoreAdminTransport from .grpc import DatastoreAdminGrpcTransport from .grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport +from .rest import DatastoreAdminRestTransport +from .rest import DatastoreAdminRestInterceptor # Compile a registry of transports. 
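+# Usage sketch (hypothetical caller code, not part of the generated file): the
+# registry below is what lets callers select the new transport by name, e.g.
+#
+#     from google.cloud.datastore_admin_v1 import DatastoreAdminClient
+#     client = DatastoreAdminClient(transport="rest")
+#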
_transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreAdminTransport]] _transport_registry["grpc"] = DatastoreAdminGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport +_transport_registry["rest"] = DatastoreAdminRestTransport __all__ = ( "DatastoreAdminTransport", "DatastoreAdminGrpcTransport", "DatastoreAdminGrpcAsyncIOTransport", + "DatastoreAdminRestTransport", + "DatastoreAdminRestInterceptor", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 72391f4ba4d1..bd0b94b4aef8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -29,6 +29,7 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -251,6 +252,39 @@ def list_indexes( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 2eac944f3680..ce244dbcad26 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -27,6 +27,7 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO @@ -508,6 +509,76 @@ def list_indexes( def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
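+        # The stub below is created lazily on first property access and cached
+        # in self._stubs, so later reads reuse the same channel registration.
+        # response_deserializer is None because DeleteOperation returns
+        # google.protobuf.Empty, whose body the client discards.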
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index d0fc1d60c99b..1519b53f7f3b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -27,6 +27,7 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreAdminGrpcTransport @@ -524,5 +525,75 @@ def list_indexes( def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("DatastoreAdminGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py new file mode 100644 index 000000000000..8c6339a945d5 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -0,0 +1,1394 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.longrunning import operations_pb2 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 # type: ignore + +from .base import ( + DatastoreAdminTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DatastoreAdminRestInterceptor: + """Interceptor for DatastoreAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DatastoreAdminRestTransport. + + .. 
code-block:: python + class MyCustomDatastoreAdminInterceptor(DatastoreAdminRestInterceptor): + def pre_create_index(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_index(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_index(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_index(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_entities(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_entities(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_index(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_index(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_entities(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_entities(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_indexes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_indexes(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DatastoreAdminRestTransport(interceptor=MyCustomDatastoreAdminInterceptor()) + client = DatastoreAdminClient(transport=transport) + + + """ + + def pre_create_index( + self, + request: datastore_admin.CreateIndexRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_index + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_create_index( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_index + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_index( + self, + request: datastore_admin.DeleteIndexRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_index + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_delete_index( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_index + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_export_entities( + self, + request: datastore_admin.ExportEntitiesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore_admin.ExportEntitiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_entities + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. 
+ """ + return request, metadata + + def post_export_entities( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_entities + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_index( + self, + request: datastore_admin.GetIndexRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_index + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_get_index(self, response: index.Index) -> index.Index: + """Post-rpc interceptor for get_index + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_import_entities( + self, + request: datastore_admin.ImportEntitiesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore_admin.ImportEntitiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_entities + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_import_entities( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_entities + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_indexes( + self, + request: datastore_admin.ListIndexesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_indexes + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_list_indexes( + self, response: datastore_admin.ListIndexesResponse + ) -> datastore_admin.ListIndexesResponse: + """Post-rpc interceptor for list_indexes + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. 
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatastoreAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DatastoreAdmin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DatastoreAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DatastoreAdminRestInterceptor + + +class DatastoreAdminRestTransport(DatastoreAdminTransport): + """REST backend transport for DatastoreAdmin. + + Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for + Cloud Datastore. + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google + Cloud Datastore API. + + Operation: An Operation represents work being performed in the + background. + EntityFilter: Allows specifying a subset of entities in a + project. This is specified as a combination of kinds and + namespaces (either or both of which may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a + subset of entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one project + and then import into another. + Exported data can also be loaded into Google BigQuery for + analysis. + Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state (including + any errors encountered) of the export/import may be queried via + the Operation resource. + # Index + + The index service manages Cloud Datastore composite indexes. + Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. 
The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed + for the specified project (including any operations in + progress). Operations are not created directly but through calls + on other collections or resources. + An operation that is not yet done may be cancelled. The request + to cancel is asynchronous and the operation may continue to run + for some time after the request to cancel is made. + + An operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. + + ListOperations returns all pending operations, but not completed + operations. + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "datastore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DatastoreAdminRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
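+        # For illustration (hypothetical hosts): a bare "datastore.googleapis.com"
+        # becomes "https://datastore.googleapis.com" below, while
+        # "http://localhost:8080" already carries a scheme and is passed through
+        # unchanged.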
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DatastoreAdminRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/operations/*}:cancel",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _CreateIndex(DatastoreAdminRestStub):
+        def __hash__(self):
+            return hash("CreateIndex")
+
+        def __call__(
+            self,
+            request: datastore_admin.CreateIndexRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the create index method over HTTP.
+
+            Args:
+                request (~.datastore_admin.CreateIndexRequest):
+                    The request object. The request for
+                    [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex].
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/indexes", + "body": "index", + }, + ] + request, metadata = self._interceptor.pre_create_index(request, metadata) + pb_request = datastore_admin.CreateIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_index(resp) + return resp + + class _DeleteIndex(DatastoreAdminRestStub): + def __hash__(self): + return hash("DeleteIndex") + + def __call__( + self, + request: datastore_admin.DeleteIndexRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete index method over HTTP. + + Args: + request (~.datastore_admin.DeleteIndexRequest): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/projects/{project_id}/indexes/{index_id}", + }, + ] + request, metadata = self._interceptor.pre_delete_index(request, metadata) + pb_request = datastore_admin.DeleteIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_index(resp) + return resp + + class _ExportEntities(DatastoreAdminRestStub): + def __hash__(self): + return hash("ExportEntities") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore_admin.ExportEntitiesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export entities method over HTTP. + + Args: + request (~.datastore_admin.ExportEntitiesRequest): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_entities(request, metadata) + pb_request = datastore_admin.ExportEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_entities(resp) + return resp + + class _GetIndex(DatastoreAdminRestStub): + def __hash__(self): + return hash("GetIndex") + + def __call__( + self, + request: datastore_admin.GetIndexRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> index.Index: + r"""Call the get index method over HTTP. + + Args: + request (~.datastore_admin.GetIndexRequest): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.index.Index: + Datastore composite index definition. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/indexes/{index_id}", + }, + ] + request, metadata = self._interceptor.pre_get_index(request, metadata) + pb_request = datastore_admin.GetIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = index.Index() + pb_resp = index.Index.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_index(resp) + return resp + + class _ImportEntities(DatastoreAdminRestStub): + def __hash__(self): + return hash("ImportEntities") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore_admin.ImportEntitiesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import entities method over HTTP. + + Args: + request (~.datastore_admin.ImportEntitiesRequest): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_entities(request, metadata) + pb_request = datastore_admin.ImportEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_entities(resp) + return resp + + class _ListIndexes(DatastoreAdminRestStub): + def __hash__(self): + return hash("ListIndexes") + + def __call__( + self, + request: datastore_admin.ListIndexesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore_admin.ListIndexesResponse: + r"""Call the list indexes method over HTTP. + + Args: + request (~.datastore_admin.ListIndexesRequest): + The request object. The request for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore_admin.ListIndexesResponse: + The response for + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/indexes", + }, + ] + request, metadata = self._interceptor.pre_list_indexes(request, metadata) + pb_request = datastore_admin.ListIndexesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore_admin.ListIndexesResponse() + pb_resp = datastore_admin.ListIndexesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_indexes(resp) + return resp + + @property + def create_index( + self, + ) -> Callable[[datastore_admin.CreateIndexRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateIndex(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_index( + self, + ) -> Callable[[datastore_admin.DeleteIndexRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteIndex(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_entities( + self, + ) -> Callable[[datastore_admin.ExportEntitiesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_index(self) -> Callable[[datastore_admin.GetIndexRequest], index.Index]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIndex(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_entities( + self, + ) -> Callable[[datastore_admin.ImportEntitiesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_indexes( + self, + ) -> Callable[ + [datastore_admin.ListIndexesRequest], datastore_admin.ListIndexesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListIndexes(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DatastoreAdminRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(DatastoreAdminRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DatastoreAdminRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DatastoreAdminRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DatastoreAdminRestTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 313c36dc7518..8b40de02c6bf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index 799aef4d143b..7966ab118517 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index 8a919b5efa44..773c4238fb59 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json index ddde2b762f17..a38c26092fe0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_metadata.json @@ -96,6 +96,51 @@ ] } } + }, + "rest": { + "libraryClient": "DatastoreClient", + "rpcs": { + "AllocateIds": { + "methods": [ + "allocate_ids" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "Lookup": { + "methods": [ + "lookup" + ] + }, + "ReserveIds": { + "methods": [ + "reserve_ids" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunAggregationQuery": { + "methods": [ + "run_aggregation_query" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + } + } } } } diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 542a1d70ddd6..480da31c83c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -55,6 +55,7 @@ from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreGrpcTransport from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport +from .transports.rest import DatastoreRestTransport class DatastoreClientMeta(type): @@ -68,6 +69,7 @@ class DatastoreClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreTransport]] _transport_registry["grpc"] = DatastoreGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport + _transport_registry["rest"] = DatastoreRestTransport def get_transport_class( cls, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py 
b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py index b7d617f6aa87..201364063c0f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -19,15 +19,20 @@ from .base import DatastoreTransport from .grpc import DatastoreGrpcTransport from .grpc_asyncio import DatastoreGrpcAsyncIOTransport +from .rest import DatastoreRestTransport +from .rest import DatastoreRestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreTransport]] _transport_registry["grpc"] = DatastoreGrpcTransport _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport +_transport_registry["rest"] = DatastoreRestTransport __all__ = ( "DatastoreTransport", "DatastoreGrpcTransport", "DatastoreGrpcAsyncIOTransport", + "DatastoreRestTransport", + "DatastoreRestInterceptor", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py new file mode 100644 index 000000000000..842a2f8b6d1c --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -0,0 +1,1632 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.longrunning import operations_pb2 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.datastore_v1.types import datastore + +from .base import DatastoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DatastoreRestInterceptor: + """Interceptor for Datastore. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DatastoreRestTransport. + + .. code-block:: python + class MyCustomDatastoreInterceptor(DatastoreRestInterceptor): + def pre_allocate_ids(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_allocate_ids(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_begin_transaction(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_begin_transaction(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_commit(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_commit(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lookup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lookup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reserve_ids(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reserve_ids(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rollback(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rollback(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_aggregation_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_aggregation_query(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_query(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DatastoreRestTransport(interceptor=MyCustomDatastoreInterceptor()) + client = DatastoreClient(transport=transport) + + + """ + + def pre_allocate_ids( + self, request: datastore.AllocateIdsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastore.AllocateIdsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for allocate_ids + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_allocate_ids( + self, response: datastore.AllocateIdsResponse + ) -> datastore.AllocateIdsResponse: + """Post-rpc interceptor for allocate_ids + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_begin_transaction( + self, + request: datastore.BeginTransactionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for begin_transaction + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. 
+ """ + return request, metadata + + def post_begin_transaction( + self, response: datastore.BeginTransactionResponse + ) -> datastore.BeginTransactionResponse: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_commit( + self, request: datastore.CommitRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastore.CommitRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for commit + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_commit( + self, response: datastore.CommitResponse + ) -> datastore.CommitResponse: + """Post-rpc interceptor for commit + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_lookup( + self, request: datastore.LookupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastore.LookupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lookup + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_lookup( + self, response: datastore.LookupResponse + ) -> datastore.LookupResponse: + """Post-rpc interceptor for lookup + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_reserve_ids( + self, request: datastore.ReserveIdsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastore.ReserveIdsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reserve_ids + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_reserve_ids( + self, response: datastore.ReserveIdsResponse + ) -> datastore.ReserveIdsResponse: + """Post-rpc interceptor for reserve_ids + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_rollback( + self, request: datastore.RollbackRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastore.RollbackRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rollback + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_rollback( + self, response: datastore.RollbackResponse + ) -> datastore.RollbackResponse: + """Post-rpc interceptor for rollback + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_run_aggregation_query( + self, + request: datastore.RunAggregationQueryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_aggregation_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. 
+ """ + return request, metadata + + def post_run_aggregation_query( + self, response: datastore.RunAggregationQueryResponse + ) -> datastore.RunAggregationQueryResponse: + """Post-rpc interceptor for run_aggregation_query + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_run_query( + self, request: datastore.RunQueryRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastore.RunQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_run_query( + self, response: datastore.RunQueryResponse + ) -> datastore.RunQueryResponse: + """Post-rpc interceptor for run_query + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastore server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Datastore server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DatastoreRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DatastoreRestInterceptor + + +class DatastoreRestTransport(DatastoreTransport): + """REST backend transport for Datastore. + + Each RPC normalizes the partition IDs of the keys in its + input entities, and always returns entities with keys with + normalized partition IDs. This applies to all keys and entities, + including those in values, except keys with both an empty path + and an empty or unset partition ID. Normalization of input keys + sets the project ID (if not already set) to the project ID from + the request. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "datastore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DatastoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        # Split any explicit "http://" / "https://" prefix off the host so the
+        # default ``url_scheme`` can be applied when none was given.
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DatastoreRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AllocateIds(DatastoreRestStub):
+        def __hash__(self):
+            return hash("AllocateIds")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: datastore.AllocateIdsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> datastore.AllocateIdsResponse:
+            r"""Call the allocate ids method over HTTP.
+
+            Args:
+                request (~.datastore.AllocateIdsRequest):
+                    The request object. The request for
+                [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.datastore.AllocateIdsResponse:
+                    The response for
+                [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/projects/{project_id}:allocateIds",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_allocate_ids(request, metadata)
+            pb_request = datastore.AllocateIdsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            # Ask the server to encode enum values as integers in the JSON response.
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.AllocateIdsResponse() + pb_resp = datastore.AllocateIdsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_allocate_ids(resp) + return resp + + class _BeginTransaction(DatastoreRestStub): + def __hash__(self): + return hash("BeginTransaction") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.BeginTransactionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.BeginTransactionResponse: + r"""Call the begin transaction method over HTTP. + + Args: + request (~.datastore.BeginTransactionRequest): + The request object. The request for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.BeginTransactionResponse: + The response for + [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:beginTransaction", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_begin_transaction( + request, metadata + ) + pb_request = datastore.BeginTransactionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.BeginTransactionResponse() + pb_resp = datastore.BeginTransactionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_begin_transaction(resp) + return resp + + class _Commit(DatastoreRestStub): + def __hash__(self): + return hash("Commit") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.CommitRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.CommitResponse: + r"""Call the commit method over HTTP. + + Args: + request (~.datastore.CommitRequest): + The request object. The request for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.CommitResponse: + The response for + [Datastore.Commit][google.datastore.v1.Datastore.Commit]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:commit", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_commit(request, metadata) + pb_request = datastore.CommitRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.CommitResponse() + pb_resp = datastore.CommitResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit(resp) + return resp + + class _Lookup(DatastoreRestStub): + def __hash__(self): + return hash("Lookup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.LookupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.LookupResponse: + r"""Call the lookup method over HTTP. + + Args: + request (~.datastore.LookupRequest): + The request object. The request for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.LookupResponse: + The response for + [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:lookup", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_lookup(request, metadata) + pb_request = datastore.LookupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.LookupResponse() + pb_resp = datastore.LookupResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup(resp) + return resp + + class _ReserveIds(DatastoreRestStub): + def __hash__(self): + return hash("ReserveIds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.ReserveIdsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.ReserveIdsResponse: + r"""Call the reserve ids method over HTTP. + + Args: + request (~.datastore.ReserveIdsRequest): + The request object. The request for + [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.ReserveIdsResponse: + The response for + [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:reserveIds", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reserve_ids(request, metadata) + pb_request = datastore.ReserveIdsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.ReserveIdsResponse() + pb_resp = datastore.ReserveIdsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reserve_ids(resp) + return resp + + class _Rollback(DatastoreRestStub): + def __hash__(self): + return hash("Rollback") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.RollbackRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RollbackResponse: + r"""Call the rollback method over HTTP. + + Args: + request (~.datastore.RollbackRequest): + The request object. The request for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RollbackResponse: + The response for + [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. + (an empty message). + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:rollback", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rollback(request, metadata) + pb_request = datastore.RollbackRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.RollbackResponse() + pb_resp = datastore.RollbackResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback(resp) + return resp + + class _RunAggregationQuery(DatastoreRestStub): + def __hash__(self): + return hash("RunAggregationQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.RunAggregationQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RunAggregationQueryResponse: + r"""Call the run aggregation query method over HTTP. + + Args: + request (~.datastore.RunAggregationQueryRequest): + The request object. The request for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RunAggregationQueryResponse: + The response for + [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:runAggregationQuery", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_aggregation_query( + request, metadata + ) + pb_request = datastore.RunAggregationQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.RunAggregationQueryResponse() + pb_resp = datastore.RunAggregationQueryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_aggregation_query(resp) + return resp + + class _RunQuery(DatastoreRestStub): + def __hash__(self): + return hash("RunQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastore.RunQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastore.RunQueryResponse: + r"""Call the run query method over HTTP. + + Args: + request (~.datastore.RunQueryRequest): + The request object. The request for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastore.RunQueryResponse: + The response for + [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:runQuery", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_query(request, metadata) + pb_request = datastore.RunQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastore.RunQueryResponse() + pb_resp = datastore.RunQueryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_query(resp) + return resp + + @property + def allocate_ids( + self, + ) -> Callable[[datastore.AllocateIdsRequest], datastore.AllocateIdsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AllocateIds(self._session, self._host, self._interceptor) # type: ignore + + @property + def begin_transaction( + self, + ) -> Callable[ + [datastore.BeginTransactionRequest], datastore.BeginTransactionResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore + + @property + def commit(self) -> Callable[[datastore.CommitRequest], datastore.CommitResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Commit(self._session, self._host, self._interceptor) # type: ignore + + @property + def lookup(self) -> Callable[[datastore.LookupRequest], datastore.LookupResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Lookup(self._session, self._host, self._interceptor) # type: ignore + + @property + def reserve_ids( + self, + ) -> Callable[[datastore.ReserveIdsRequest], datastore.ReserveIdsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReserveIds(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback( + self, + ) -> Callable[[datastore.RollbackRequest], datastore.RollbackResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Rollback(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_aggregation_query( + self, + ) -> Callable[ + [datastore.RunAggregationQueryRequest], datastore.RunAggregationQueryResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunAggregationQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_query( + self, + ) -> Callable[[datastore.RunQueryRequest], datastore.RunQueryResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DatastoreRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(DatastoreRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DatastoreRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DatastoreRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DatastoreRestTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index c2d1d8599acc..cf8e8339abf7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index b748dd4758f1..2512ad890890 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -111,7 +113,9 @@ class LookupResponse(proto.Message): The identifier of the transaction that was started as part of this Lookup request. - Set only when [ReadOptions.begin_transaction][] was set in + Set only when + [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] + was set in [LookupRequest.read_options][google.datastore.v1.LookupRequest.read_options]. read_time (google.protobuf.timestamp_pb2.Timestamp): The time at which these entities were read or @@ -229,7 +233,9 @@ class RunQueryResponse(proto.Message): The identifier of the transaction that was started as part of this RunQuery request. - Set only when [ReadOptions.begin_transaction][] was set in + Set only when + [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] + was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. """ @@ -335,7 +341,9 @@ class RunAggregationQueryResponse(proto.Message): The identifier of the transaction that was started as part of this RunAggregationQuery request. - Set only when [ReadOptions.begin_transaction][] was set in + Set only when + [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] + was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. """ @@ -475,7 +483,7 @@ class CommitRequest(proto.Message): Options for beginning a new transaction for this request. The transaction is committed when the request completes. If specified, - [TransactionOptions.mode][google.datastore.v1.TransactionOptions.mode] + [TransactionOptions.mode][google.datastore.v1.TransactionOptions] must be [TransactionOptions.ReadWrite][google.datastore.v1.TransactionOptions.ReadWrite]. 
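The ``new_transaction`` read option referenced in the docstrings above is also
what causes the response-level ``transaction`` field to be populated. A minimal
sketch of that flow, using the message and field names from this diff (the
project ID and kind are placeholders):

.. code-block:: python

    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.types import datastore, query

    client = datastore_v1.DatastoreClient()

    # Ask the service to begin a new read-write transaction as part of the query.
    request = datastore.RunQueryRequest(
        project_id="my-project",  # placeholder
        read_options=datastore.ReadOptions(
            new_transaction=datastore.TransactionOptions(
                read_write=datastore.TransactionOptions.ReadWrite()
            )
        ),
        query=query.Query(kind=[query.KindExpression(name="Task")]),  # placeholder
    )
    response = client.run_query(request=request)

    # Set only because new_transaction was requested in read_options.
    print(response.transaction)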
@@ -776,6 +784,9 @@ class MutationResult(proto.Message): greater than the version of any previous entity and less than the version of any possible future entity. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of the entity. This field + will not be set after a 'delete'. update_time (google.protobuf.timestamp_pb2.Timestamp): The update time of the entity on the server after processing the mutation. If the mutation @@ -798,6 +809,11 @@ class MutationResult(proto.Message): proto.INT64, number=4, ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index adb651a2ce22..ec6564da5e70 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index e19c2c2c44b1..17e1cca502d8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -60,6 +62,12 @@ class EntityResult(proto.Message): entities in ``LookupResponse``, this is the version of the snapshot that was used to look up the entity, and it is always set except for eventually consistent reads. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the entity was created. This field is set + for + [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] + entity results. If this entity is missing, this field will + not be set. update_time (google.protobuf.timestamp_pb2.Timestamp): The time at which the entity was last changed. This field is set for @@ -105,6 +113,11 @@ class ResultType(proto.Enum): proto.INT64, number=4, ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, @@ -474,9 +487,13 @@ class Operator(proto.Enum): AND (1): The results are required to satisfy each of the combined filters. + OR (2): + Documents are required to satisfy at least + one of the combined filters. """ OPERATOR_UNSPECIFIED = 0 AND = 1 + OR = 2 op: Operator = proto.Field( proto.ENUM, @@ -560,6 +577,7 @@ class Operator(proto.Enum): Requires: - That ``value`` is an entity key. + - No other ``HAS_ANCESTOR`` is in the same query. NOT_IN (13): The value of the ``property`` is not in the given array. 
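The new ``OR`` operator slots into the existing ``CompositeFilter`` shape, so composing it looks just like ``AND`` always has. A minimal sketch with the raw ``datastore_v1`` types (the kind, property names, values, and the small ``eq`` helper are placeholders; illustrative only, not part of this patch):

    from google.cloud.datastore_v1 import types

    def eq(name: str, value: str) -> types.Filter:
        # Hypothetical helper: wraps a single equality property filter.
        return types.Filter(
            property_filter=types.PropertyFilter(
                property=types.PropertyReference(name=name),
                op=types.PropertyFilter.Operator.EQUAL,
                value=types.Value(string_value=value),
            )
        )

    # Entities satisfying either branch match the composite filter.
    or_filter = types.Filter(
        composite_filter=types.CompositeFilter(
            op=types.CompositeFilter.Operator.OR,
            filters=[eq("category", "news"), eq("category", "sports")],
        )
    )

    query = types.Query(
        kind=[types.KindExpression(name="Article")],
        filter=or_filter,
    )

A ``Query`` built this way is what ``RunQueryRequest.query`` accepts, and the per-entity ``create_time`` documented above comes back on each ``EntityResult`` in the response.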
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 63f8814f6887..8e9b37d08065 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -106,6 +113,7 @@ def test__get_default_mtls_endpoint(): [ (DatastoreAdminClient, "grpc"), (DatastoreAdminAsyncClient, "grpc_asyncio"), + (DatastoreAdminClient, "rest"), ], ) def test_datastore_admin_client_from_service_account_info(client_class, transport_name): @@ -119,7 +127,11 @@ def test_datastore_admin_client_from_service_account_info(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastore.googleapis.com:443") + assert client.transport._host == ( + "datastore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com" + ) @pytest.mark.parametrize( @@ -127,6 +139,7 @@ def test_datastore_admin_client_from_service_account_info(client_class, transpor [ (transports.DatastoreAdminGrpcTransport, "grpc"), (transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DatastoreAdminRestTransport, "rest"), ], ) def test_datastore_admin_client_service_account_always_use_jwt( @@ -152,6 +165,7 @@ def test_datastore_admin_client_service_account_always_use_jwt( [ (DatastoreAdminClient, "grpc"), (DatastoreAdminAsyncClient, "grpc_asyncio"), + (DatastoreAdminClient, "rest"), ], ) def test_datastore_admin_client_from_service_account_file(client_class, transport_name): @@ -172,13 +186,18 @@ def test_datastore_admin_client_from_service_account_file(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastore.googleapis.com:443") + assert client.transport._host == ( + "datastore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com" + ) def test_datastore_admin_client_get_transport_class(): transport = DatastoreAdminClient.get_transport_class() available_transports = [ transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminRestTransport, ] assert transport in available_transports @@ -195,6 +214,7 @@ def test_datastore_admin_client_get_transport_class(): transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), ], ) @mock.patch.object( @@ -340,6 +360,8 @@ def test_datastore_admin_client_client_options( "grpc_asyncio", "false", ), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest", "true"), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -539,6 +561,7 @@ 
def test_datastore_admin_client_get_mtls_endpoint_and_cert_source(client_class): transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), ], ) def test_datastore_admin_client_client_options_scopes( @@ -579,6 +602,7 @@ def test_datastore_admin_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest", None), ], ) def test_datastore_admin_client_client_options_credentials_file( @@ -2000,687 +2024,2707 @@ async def test_list_indexes_async_pages(): assert page_.raw_page.next_page_token == token -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ExportEntitiesRequest, + dict, + ], +) +def test_export_entities_rest(request_type): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_entities(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_entities_rest_required_fields( + request_type=datastore_admin.ExportEntitiesRequest, +): + transport_class = transports.DatastoreAdminRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["output_url_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide a credentials file and a transport instance. 
-    transport = transports.DatastoreAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DatastoreAdminClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).export_entities._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["projectId"] = "project_id_value"
+    jsonified_request["outputUrlPrefix"] = "output_url_prefix_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).export_entities._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "projectId" in jsonified_request
+    assert jsonified_request["projectId"] == "project_id_value"
+    assert "outputUrlPrefix" in jsonified_request
+    assert jsonified_request["outputUrlPrefix"] == "output_url_prefix_value"
+
+    client = DatastoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.export_entities(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_export_entities_rest_unset_required_fields():
+    transport = transports.DatastoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.export_entities._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "projectId",
+                "outputUrlPrefix",
+            )
+        )
+    )
 
-    # It is an error to provide an api_key and a transport instance.
- transport = transports.DatastoreAdminGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_entities_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options=options, - transport=transport, + client = DatastoreAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_export_entities" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_export_entities" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.ExportEntitiesRequest.pb( + datastore_admin.ExportEntitiesRequest() ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - # It is an error to provide scopes and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + request = datastore_admin.ExportEntitiesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_entities( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreAdminGrpcTransport( + +def test_export_entities_rest_bad_request( + transport: str = "rest", request_type=datastore_admin.ExportEntitiesRequest +): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = DatastoreAdminClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreAdminGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_entities(request) + + +def test_export_entities_rest_flattened(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.DatastoreAdminGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + labels={"key_value": "value_value"}, + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + output_url_prefix="output_url_prefix_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:export" % client.transport._host, args[1] + ) + + +def test_export_entities_rest_flattened_error(transport: str = "rest"): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_entities( + datastore_admin.ExportEntitiesRequest(), + project_id="project_id_value", + labels={"key_value": "value_value"}, + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + output_url_prefix="output_url_prefix_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_export_entities_rest_error(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + datastore_admin.ImportEntitiesRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DatastoreAdminClient.get_transport_class(transport_name)( +def test_import_entities_rest(request_type): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_entities(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_import_entities_rest_required_fields( + request_type=datastore_admin.ImportEntitiesRequest, +): + transport_class = transports.DatastoreAdminRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["input_url"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["inputUrl"] = "input_url_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "inputUrl" in jsonified_request + assert jsonified_request["inputUrl"] == "input_url_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.DatastoreAdminGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.import_entities(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_import_entities_rest_unset_required_fields():
+    transport = transports.DatastoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.import_entities._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "projectId",
+                "inputUrl",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_import_entities_rest_interceptors(null_interceptor):
+    transport = transports.DatastoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DatastoreAdminRestInterceptor(),
+    )
+    client = DatastoreAdminClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.DatastoreAdminRestInterceptor, "post_import_entities"
+    ) as post, mock.patch.object(
+        transports.DatastoreAdminRestInterceptor, "pre_import_entities"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datastore_admin.ImportEntitiesRequest.pb(
+            datastore_admin.ImportEntitiesRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = datastore_admin.ImportEntitiesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
- with mock.patch( - "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DatastoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.import_entities( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "export_entities", - "import_entities", - "create_index", - "delete_index", - "get_index", - "list_indexes", + pre.assert_called_once() + post.assert_called_once() + + +def test_import_entities_rest_bad_request( + transport: str = "rest", request_type=datastore_admin.ImportEntitiesRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_entities(request) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +def test_import_entities_rest_flattened(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -def test_datastore_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatastoreAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id="octopus", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} -def test_datastore_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatastoreAdminTransport() - adc.assert_called_once() + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + labels={"key_value": "value_value"}, + input_url="input_url_value", + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_datastore_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatastoreAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id=None, + client.import_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:import" % client.transport._host, args[1] ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - ], -) -def test_datastore_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id="octopus", +def test_import_entities_rest_flattened_error(transport: str = "rest"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.import_entities( + datastore_admin.ImportEntitiesRequest(), + project_id="project_id_value", + labels={"key_value": "value_value"}, + input_url="input_url_value", + entity_filter=datastore_admin.EntityFilter(kinds=["kinds_value"]), ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - ], -) -def test_datastore_admin_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) +def test_import_entities_rest_error(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_class,grpc_helpers", + "request_type", [ - (transports.DatastoreAdminGrpcTransport, grpc_helpers), - (transports.DatastoreAdminGrpcAsyncIOTransport, grpc_helpers_async), + datastore_admin.CreateIndexRequest, + dict, ], ) -def test_datastore_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) +def test_create_index_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - create_channel.assert_called_with( - "datastore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - scopes=["1", "2"], - default_host="datastore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["index"] = { + "project_id": "project_id_value", + "index_id": "index_id_value", + "kind": "kind_value", + "ancestor": 1, + "properties": [{"name": "name_value", "direction": 1}], + "state": 1, + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - ], -) -def test_datastore_admin_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_index(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_index_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), + ) + client = DatastoreAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_create_index" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_create_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.CreateIndexRequest.pb( + datastore_admin.CreateIndexRequest() ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastore_admin.CreateIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), ], ) - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - ], -) -def test_datastore_admin_host_no_port(transport_name): +def test_create_index_rest_bad_request( + transport: str = "rest", request_type=datastore_admin.CreateIndexRequest +): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="datastore.googleapis.com" - ), - transport=transport_name, + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["index"] = { + "project_id": "project_id_value", + "index_id": "index_id_value", + "kind": "kind_value", + "ancestor": 1, + "properties": [{"name": "name_value", "direction": 1}], + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_index(request) + + +def test_create_index_rest_error(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", - "grpc_asyncio", + datastore_admin.DeleteIndexRequest, + dict, ], ) -def test_datastore_admin_host_with_port(transport_name): +def test_delete_index_rest(request_type): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="datastore.googleapis.com:8000" - ), - transport=transport_name, + transport="rest", ) - assert client.transport._host == ("datastore.googleapis.com:8000") + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) -def test_datastore_admin_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - # Check that channel is used if provided. 
- transport = transports.DatastoreAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_index(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_index_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + client = DatastoreAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_delete_index" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_delete_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.DeleteIndexRequest.pb( + datastore_admin.DeleteIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastore_admin.DeleteIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() -def test_datastore_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - # Check that channel is used if provided. - transport = transports.DatastoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, +def test_delete_index_rest_bad_request( + transport: str = "rest", request_type=datastore_admin.DeleteIndexRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_index(request) + + +def test_delete_index_rest_error(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, + datastore_admin.GetIndexRequest, + dict, ], ) -def test_datastore_admin_transport_channel_mtls_with_client_cert_source( - transport_class, +def test_get_index_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_index(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, index.Index) + assert response.project_id == "project_id_value" + assert response.index_id == "index_id_value" + assert response.kind == "kind_value" + assert response.ancestor == index.Index.AncestorMode.NONE + assert response.state == index.Index.State.CREATING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_index_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), + ) + client = DatastoreAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_get_index" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_get_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.GetIndexRequest.pb( + datastore_admin.GetIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = index.Index.to_json(index.Index()) + + request = datastore_admin.GetIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = index.Index() + + client.get_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_index_rest_bad_request( + transport: str = "rest", request_type=datastore_admin.GetIndexRequest ): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_index(request) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), + +def test_get_index_rest_error(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ListIndexesRequest, + dict, + ], +) +def test_list_indexes_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore_admin.ListIndexesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_indexes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListIndexesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_indexes_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), + ) + client = DatastoreAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_list_indexes" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_list_indexes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.ListIndexesRequest.pb( + datastore_admin.ListIndexesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore_admin.ListIndexesResponse.to_json( + datastore_admin.ListIndexesResponse() + ) + + request = datastore_admin.ListIndexesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore_admin.ListIndexesResponse() + + client.list_indexes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_indexes_rest_bad_request( + transport: str = "rest", request_type=datastore_admin.ListIndexesRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_indexes(request) + + +def test_list_indexes_rest_pager(transport: str = "rest"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + next_page_token="abc", + ), + datastore_admin.ListIndexesResponse( + indexes=[], + next_page_token="def", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datastore_admin.ListIndexesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1"} + + pager = client.list_indexes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, index.Index) for i in results) + + pages = list(client.list_indexes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatastoreAdminClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastoreAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ transports.DatastoreAdminGrpcTransport, transports.DatastoreAdminGrpcAsyncIOTransport, + transports.DatastoreAdminRestTransport, ], ) -def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreAdminGrpcTransport, + ) + + +def test_datastore_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DatastoreAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_datastore_admin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DatastoreAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
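+    # Concrete transports (gRPC, REST) are expected to override each of these.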
+ methods = ( + "export_entities", + "import_entities", + "create_index", + "delete_index", + "get_index", + "list_indexes", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_datastore_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DatastoreAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +def test_datastore_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DatastoreAdminTransport() + adc.assert_called_once() + + +def test_datastore_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DatastoreAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
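+    # ADC is Application Default Credentials, resolved via google.auth.default().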
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + transports.DatastoreAdminRestTransport, + ], +) +def test_datastore_admin_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DatastoreAdminGrpcTransport, grpc_helpers), + (transports.DatastoreAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_datastore_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=["1", "2"], + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_datastore_admin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DatastoreAdminRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_datastore_admin_rest_lro_client(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_datastore_admin_host_no_port(transport_name): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datastore.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "datastore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_datastore_admin_host_with_port(transport_name): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datastore.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "datastore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_datastore_admin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DatastoreAdminClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DatastoreAdminClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.export_entities._session + session2 = client2.transport.export_entities._session + assert session1 != session2 + session1 = client1.transport.import_entities._session + session2 = client2.transport.import_entities._session + assert session1 != session2 + session1 = client1.transport.create_index._session + session2 = client2.transport.create_index._session + assert session1 != session2 + session1 = client1.transport.delete_index._session + session2 = client2.transport.delete_index._session + assert session1 != session2 + session1 = client1.transport.get_index._session + session2 = client2.transport.get_index._session + assert session1 != 
session2 + session1 = client1.transport.list_indexes._session + session2 = client2.transport.list_indexes._session + assert session1 != session2 + + +def test_datastore_admin_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DatastoreAdminGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_datastore_admin_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DatastoreAdminGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + ], +) +def test_datastore_admin_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DatastoreAdminGrpcTransport,
+        transports.DatastoreAdminGrpcAsyncIOTransport,
+    ],
+)
+def test_datastore_admin_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_datastore_admin_grpc_lro_client():
+    client = DatastoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_datastore_admin_grpc_lro_async_client():
+    client = DatastoreAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = DatastoreAdminClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = DatastoreAdminClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DatastoreAdminClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = DatastoreAdminClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = DatastoreAdminClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DatastoreAdminClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DatastoreAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DatastoreAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = DatastoreAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DatastoreAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DatastoreAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DatastoreAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DatastoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DatastoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DatastoreAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
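+    # The REST transport should translate the 400 status into
+    # google.api_core.exceptions.BadRequest.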
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_datastore_admin_grpc_lro_client(): +def test_get_operation_field_headers(): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_datastore_admin_grpc_lro_async_client(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): client = DatastoreAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, +def test_get_operation_from_dict(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = DatastoreAdminClient.common_billing_account_path(billing_account) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DatastoreAdminClient.common_billing_account_path(**expected) - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_billing_account_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, +def test_list_operations(transport: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = DatastoreAdminClient.common_folder_path(folder) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DatastoreAdminClient.common_folder_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. 
- actual = DatastoreAdminClient.parse_common_folder_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = DatastoreAdminClient.common_organization_path(organization) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DatastoreAdminClient.common_organization_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_organization_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, +def test_list_operations_field_headers(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = DatastoreAdminClient.common_project_path(project) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DatastoreAdminClient.common_project_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_project_path(path) - assert expected == actual + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = DatastoreAdminClient.common_location_path(project, location) - assert expected == actual +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DatastoreAdminClient.common_location_path(**expected) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_location_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object( - transports.DatastoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +def test_list_operations_from_dict(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() - with mock.patch.object( - transports.DatastoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = DatastoreAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + response = client.list_operations( + request={ + "name": "locations", + } ) - prep.assert_called_once_with(client_info) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_list_operations_from_dict_async(): client = DatastoreAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
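+        # FakeUnaryUnaryCall wraps the response so the mocked stub call can be
+        # awaited like a real async gRPC invocation.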
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2698,6 +4742,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 4833d39b90ea..2a89f4e4ac60 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -97,6 +104,7 @@ def test__get_default_mtls_endpoint(): [ (DatastoreClient, "grpc"), (DatastoreAsyncClient, "grpc_asyncio"), + (DatastoreClient, "rest"), ], ) def test_datastore_client_from_service_account_info(client_class, transport_name): @@ -110,7 +118,11 @@ def test_datastore_client_from_service_account_info(client_class, transport_name assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastore.googleapis.com:443") + assert client.transport._host == ( + "datastore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com" + ) @pytest.mark.parametrize( @@ -118,6 +130,7 @@ def test_datastore_client_from_service_account_info(client_class, transport_name [ (transports.DatastoreGrpcTransport, "grpc"), (transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DatastoreRestTransport, "rest"), ], ) def test_datastore_client_service_account_always_use_jwt( @@ -143,6 +156,7 @@ def test_datastore_client_service_account_always_use_jwt( [ (DatastoreClient, "grpc"), (DatastoreAsyncClient, "grpc_asyncio"), + (DatastoreClient, "rest"), ], ) def test_datastore_client_from_service_account_file(client_class, transport_name): @@ -163,13 +177,18 @@ def test_datastore_client_from_service_account_file(client_class, transport_name assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastore.googleapis.com:443") + assert client.transport._host == ( + "datastore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com" + ) def test_datastore_client_get_transport_class(): transport = DatastoreClient.get_transport_class() available_transports = [ transports.DatastoreGrpcTransport, + transports.DatastoreRestTransport, ] assert transport in available_transports @@ -186,6 +205,7 @@ def test_datastore_client_get_transport_class(): transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastoreClient, transports.DatastoreRestTransport, "rest"), ], ) @mock.patch.object( @@ -327,6 +347,8 @@ 
def test_datastore_client_client_options(client_class, transport_class, transpor "grpc_asyncio", "false", ), + (DatastoreClient, transports.DatastoreRestTransport, "rest", "true"), + (DatastoreClient, transports.DatastoreRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -520,6 +542,7 @@ def test_datastore_client_get_mtls_endpoint_and_cert_source(client_class): transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastoreClient, transports.DatastoreRestTransport, "rest"), ], ) def test_datastore_client_client_options_scopes( @@ -555,6 +578,7 @@ def test_datastore_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (DatastoreClient, transports.DatastoreRestTransport, "rest", None), ], ) def test_datastore_client_client_options_credentials_file( @@ -2405,181 +2429,2262 @@ async def test_reserve_ids_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastoreGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + datastore.LookupRequest, + dict, + ], +) +def test_lookup_rest(request_type): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.LookupResponse( + transaction=b"transaction_blob", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.LookupResponse) + assert response.transaction == b"transaction_blob" + + +def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DatastoreGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreClient( - client_options=options, - transport=transport, - ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.LookupResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lookup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lookup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "keys", + ) ) + ) - # It is an error to provide scopes and a transport instance. 
- transport = transports.DatastoreGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), ) - with pytest.raises(ValueError): - client = DatastoreClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_lookup" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_lookup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.LookupRequest.pb(datastore.LookupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.LookupResponse.to_json( + datastore.LookupResponse() ) + request = datastore.LookupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.LookupResponse() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreGrpcTransport( + client.lookup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lookup_rest_bad_request( + transport: str = "rest", request_type=datastore.LookupRequest +): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = DatastoreClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup(request) + + +def test_lookup_rest_flattened(): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.DatastoreGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
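+        # (an empty LookupResponse suffices; the assertions below validate the transcoded URL, not the payload.)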
+ return_value = datastore.LookupResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + read_options=datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ), + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:lookup" % client.transport._host, args[1] + ) + + +def test_lookup_rest_flattened_error(transport: str = "rest"): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lookup( + datastore.LookupRequest(), + project_id="project_id_value", + read_options=datastore.ReadOptions( + read_consistency=datastore.ReadOptions.ReadConsistency.STRONG + ), + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreGrpcTransport, - transports.DatastoreGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_lookup_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + datastore.RunQueryRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DatastoreClient.get_transport_class(transport_name)( +def test_run_query_rest(request_type): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
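+        # (only the transaction bytes field is populated; it round-trips through the JSON body of the mocked response.)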
+ return_value = datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_query(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunQueryResponse) + assert response.transaction == b"transaction_blob" + + +def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.DatastoreGrpcTransport, - ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.RunQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
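+            # The stub below pins uri and method to placeholders so no real http rule is consulted.
+            # For reference, a genuine transcode result for this request would resemble
+            #     {"uri": "/v1/projects/sample1:runQuery", "method": "post", "body": <RunQueryRequest>, "query_params": {}}
+            # (illustrative shape only, not an exact value).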
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -def test_datastore_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatastoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + pb_return_value = datastore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_datastore_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DatastoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response = client.run_query(request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "lookup", - "run_query", - "run_aggregation_query", - "begin_transaction", - "commit", - "rollback", - "allocate_ids", - "reserve_ids", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - with pytest.raises(NotImplementedError): - transport.close() - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +def test_run_query_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.run_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId",))) -def test_datastore_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_query_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_query" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_run_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
datastore.RunQueryRequest.pb(datastore.RunQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.RunQueryResponse.to_json( + datastore.RunQueryResponse() + ) + + request = datastore.RunQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.RunQueryResponse() + + client.run_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_query_rest_bad_request( + transport: str = "rest", request_type=datastore.RunQueryRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_query(request) + + +def test_run_query_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunAggregationQueryRequest, + dict, + ], +) +def test_run_aggregation_query_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_aggregation_query(request) + + # Establish that the response is the type that we expect. 
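+    # (the mocked Session returned raw JSON, so arriving at a typed proto here shows deserialization succeeded.)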
+ assert isinstance(response, datastore.RunAggregationQueryResponse) + assert response.transaction == b"transaction_blob" + + +def test_run_aggregation_query_rest_required_fields( + request_type=datastore.RunAggregationQueryRequest, +): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_aggregation_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_aggregation_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.RunAggregationQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
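+            # The params assertion at the end of this test expects $alt to be the only query parameter the client adds on its own.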
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_aggregation_query(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_aggregation_query_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_aggregation_query_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_aggregation_query" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_run_aggregation_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.RunAggregationQueryRequest.pb( + datastore.RunAggregationQueryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.RunAggregationQueryResponse.to_json( + datastore.RunAggregationQueryResponse() + ) + + request = datastore.RunAggregationQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.RunAggregationQueryResponse() + + client.run_aggregation_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_aggregation_query_rest_bad_request( + transport: str = "rest", request_type=datastore.RunAggregationQueryRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
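+    # A 400 status is enough: the REST transport maps non-2xx responses to google.api_core exceptions (here BadRequest).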
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_aggregation_query(request) + + +def test_run_aggregation_query_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.BeginTransactionRequest, + dict, + ], +) +def test_begin_transaction_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.begin_transaction(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.BeginTransactionResponse) + assert response.transaction == b"transaction_blob" + + +def test_begin_transaction_rest_required_fields( + request_type=datastore.BeginTransactionRequest, +): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).begin_transaction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).begin_transaction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.BeginTransactionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
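+        # (transcode comes from google.api_core.path_template; unstubbed, it would match the request against an http rule.)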
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.begin_transaction(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_begin_transaction_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.begin_transaction._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_begin_transaction_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_begin_transaction" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_begin_transaction" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.BeginTransactionRequest.pb( + datastore.BeginTransactionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.BeginTransactionResponse.to_json( + datastore.BeginTransactionResponse() + ) + + request = datastore.BeginTransactionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.BeginTransactionResponse() + + client.begin_transaction( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_begin_transaction_rest_bad_request( + transport: str = "rest", request_type=datastore.BeginTransactionRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
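+    # (no response body is set; the status code alone determines which exception is raised.)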
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.begin_transaction(request) + + +def test_begin_transaction_rest_flattened(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.BeginTransactionResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.begin_transaction(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:beginTransaction" % client.transport._host, + args[1], + ) + + +def test_begin_transaction_rest_flattened_error(transport: str = "rest"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.begin_transaction( + datastore.BeginTransactionRequest(), + project_id="project_id_value", + ) + + +def test_begin_transaction_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.CommitRequest, + dict, + ], +) +def test_commit_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.CommitResponse( + index_updates=1389, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.commit(request) + + # Establish that the response is the type that we expect. 
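+    # (index_updates is a plain int32, so it comes back from the JSON round trip unchanged.)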
+ assert isinstance(response, datastore.CommitResponse) + assert response.index_updates == 1389 + + +def test_commit_rest_required_fields(request_type=datastore.CommitRequest): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).commit._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).commit._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.CommitResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
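+            # transcode_result also carries the request as the body, mirroring commit's POST-with-body mapping.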
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.commit(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_commit_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.commit._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_commit" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_commit" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.CommitRequest.pb(datastore.CommitRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.CommitResponse.to_json( + datastore.CommitResponse() + ) + + request = datastore.CommitRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.CommitResponse() + + client.commit( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_commit_rest_bad_request( + transport: str = "rest", request_type=datastore.CommitRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.commit(request) + + +def test_commit_rest_flattened(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
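+    # Patching request on the session's type intercepts the HTTP call without swapping out the AuthorizedSession itself.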
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.CommitResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) + ) + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.commit(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:commit" % client.transport._host, args[1] + ) + + +def test_commit_rest_flattened_error(transport: str = "rest"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + datastore.CommitRequest(), + project_id="project_id_value", + mode=datastore.CommitRequest.Mode.TRANSACTIONAL, + transaction=b"transaction_blob", + mutations=[ + datastore.Mutation( + insert=entity.Entity( + key=entity.Key( + partition_id=entity.PartitionId( + project_id="project_id_value" + ) + ) + ) + ) + ], + ) + + +def test_commit_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.RollbackRequest, + dict, + ], +) +def test_rollback_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.RollbackResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rollback(request) + + # Establish that the response is the type that we expect. 
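+    # (RollbackResponse has no fields, so the isinstance check is the entire assertion.)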
+ assert isinstance(response, datastore.RollbackResponse) + + +def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["transaction"] = b"" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["transaction"] = b"transaction_blob" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "transaction" in jsonified_request + assert jsonified_request["transaction"] == b"transaction_blob" + + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.RollbackResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
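+            # (unlike its neighbors, this test primed two required fields above: projectId and transaction.)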
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.rollback(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rollback_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rollback._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "transaction", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_rollback" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_rollback" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.RollbackResponse.to_json( + datastore.RollbackResponse() + ) + + request = datastore.RollbackRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.RollbackResponse() + + client.rollback( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_rest_bad_request( + transport: str = "rest", request_type=datastore.RollbackRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rollback(request) + + +def test_rollback_rest_flattened(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
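+    # The URL assertion below validates against the :rollback custom-method pattern via path_template.validate.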
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.RollbackResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + transaction=b"transaction_blob", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rollback(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:rollback" % client.transport._host, args[1] + ) + + +def test_rollback_rest_flattened_error(transport: str = "rest"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + datastore.RollbackRequest(), + project_id="project_id_value", + transaction=b"transaction_blob", + ) + + +def test_rollback_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.AllocateIdsRequest, + dict, + ], +) +def test_allocate_ids_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.AllocateIdsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.allocate_ids(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastore.AllocateIdsResponse) + + +def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsRequest): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).allocate_ids._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).allocate_ids._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.AllocateIdsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
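+            # ($alt is asserted later as "json;enum-encoding=int", which asks the server to render enums as integers in JSON.)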
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.allocate_ids(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_allocate_ids_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.allocate_ids._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "keys", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_allocate_ids_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_allocate_ids" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_allocate_ids" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.AllocateIdsResponse.to_json( + datastore.AllocateIdsResponse() + ) + + request = datastore.AllocateIdsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.AllocateIdsResponse() + + client.allocate_ids( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_allocate_ids_rest_bad_request( + transport: str = "rest", request_type=datastore.AllocateIdsRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
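+    # (the Response is given a Request object so the raised exception can reference what was sent.)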
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.allocate_ids(request) + + +def test_allocate_ids_rest_flattened(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.AllocateIdsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.allocate_ids(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:allocateIds" % client.transport._host, args[1] + ) + + +def test_allocate_ids_rest_flattened_error(transport: str = "rest"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.allocate_ids( + datastore.AllocateIdsRequest(), + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + +def test_allocate_ids_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.ReserveIdsRequest, + dict, + ], +) +def test_reserve_ids_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.ReserveIdsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reserve_ids(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastore.ReserveIdsResponse) + + +def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsRequest): + transport_class = transports.DatastoreRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reserve_ids._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reserve_ids._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastore.ReserveIdsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
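+            # (note: the stubbed transcode result below also carries the request
+            # as the body, so both encodings are exercised without real transcoding)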
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reserve_ids(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reserve_ids_rest_unset_required_fields(): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reserve_ids._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "keys", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reserve_ids_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_reserve_ids" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_reserve_ids" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastore.ReserveIdsResponse.to_json( + datastore.ReserveIdsResponse() + ) + + request = datastore.ReserveIdsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.ReserveIdsResponse() + + client.reserve_ids( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reserve_ids_rest_bad_request( + transport: str = "rest", request_type=datastore.ReserveIdsRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reserve_ids(request) + + +def test_reserve_ids_rest_flattened(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
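+    # (note: the flattened call is verified purely against the mocked session;
+    # the transcoded URL asserted at the end is the interesting output)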
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.ReserveIdsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.reserve_ids(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}:reserveIds" % client.transport._host, args[1] + ) + + +def test_reserve_ids_rest_flattened_error(transport: str = "rest"): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reserve_ids( + datastore.ReserveIdsRequest(), + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) + + +def test_reserve_ids_rest_error(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
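+    # (note: credentials then live on the transport itself; supplying them to
+    # the client as well raises ValueError, per test_credentials_transport_error)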
+ transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatastoreClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + transports.DatastoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreGrpcTransport, + ) + + +def test_datastore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DatastoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_datastore_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DatastoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
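+    # (note: DatastoreTransport is only an interface; the gRPC and REST
+    # subclasses supply the concrete method implementations)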
+ methods = ( + "lookup", + "run_query", + "run_aggregation_query", + "begin_transaction", + "commit", + "rollback", + "allocate_ids", + "reserve_ids", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_datastore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2649,6 +4754,7 @@ def test_datastore_transport_auth_adc(transport_class): [ transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport, + transports.DatastoreRestTransport, ], ) def test_datastore_transport_auth_gdch_credentials(transport_class): @@ -2746,11 +4852,23 @@ def test_datastore_grpc_transport_client_cert_source_for_mtls(transport_class): ) +def test_datastore_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DatastoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_datastore_host_no_port(transport_name): @@ -2761,7 +4879,11 @@ def test_datastore_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("datastore.googleapis.com:443") + assert client.transport._host == ( + "datastore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com" + ) @pytest.mark.parametrize( @@ -2769,6 +4891,7 @@ def test_datastore_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_datastore_host_with_port(transport_name): @@ -2779,7 +4902,54 @@ def test_datastore_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("datastore.googleapis.com:8000") + assert client.transport._host == ( + "datastore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastore.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_datastore_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DatastoreClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DatastoreClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.lookup._session + session2 = client2.transport.lookup._session + assert session1 != session2 + session1 = 
client1.transport.run_query._session + session2 = client2.transport.run_query._session + assert session1 != session2 + session1 = client1.transport.run_aggregation_query._session + session2 = client2.transport.run_aggregation_query._session + assert session1 != session2 + session1 = client1.transport.begin_transaction._session + session2 = client2.transport.begin_transaction._session + assert session1 != session2 + session1 = client1.transport.commit._session + session2 = client2.transport.commit._session + assert session1 != session2 + session1 = client1.transport.rollback._session + session2 = client2.transport.rollback._session + assert session1 != session2 + session1 = client1.transport.allocate_ids._session + session2 = client2.transport.allocate_ids._session + assert session1 != session2 + session1 = client1.transport.reserve_ids._session + session2 = client2.transport.reserve_ids._session + assert session1 != session2 def test_datastore_grpc_transport_channel(): @@ -3040,6 +5210,236 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
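+    # (note: same stubbing pattern as the data-plane tests above — a 400 status
+    # on the mocked session surfaces as core_exceptions.BadRequest)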
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
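+    # (note: the operations API reuses the REST transport's error translation,
+    # so the mocked 400 again raises core_exceptions.BadRequest)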
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_delete_operation(transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3610,6 +6010,7 @@ async def test_list_operations_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -3627,6 +6028,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From 2ed4edd2ae38f406464f5903d85ce22c81075427 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 28 Feb 2023 18:44:50 -0500 Subject: [PATCH 501/611] chore(main): release 2.14.0 (#422) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 12 ++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 18 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index bc12e128dd66..851649e8452b 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.13.2" + ".": "2.14.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 423aa9fe167f..f0dd6b140019 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.14.0](https://github.com/googleapis/python-datastore/compare/v2.13.2...v2.14.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums 
([6785908](https://github.com/googleapis/python-datastore/commit/678590808a0867dbb2c2002c1ead39586d61be86)) + + +### Documentation + +* Minor documentation formatting and cleanup ([6785908](https://github.com/googleapis/python-datastore/commit/678590808a0867dbb2c2002c1ead39586d61be86)) + ## [2.13.2](https://github.com/googleapis/python-datastore/compare/v2.13.1...v2.13.2) (2023-01-23) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 7eeb26dd316b..fe257fa248dc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.2" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 0cae902c855f..ba8b4e8af8c9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.2" +__version__ = "2.14.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index c790ba98e3c2..8be002907dd0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.2" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 6d50101819e4..f485e26eb50b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.13.2" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 6d50101819e4..f485e26eb50b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.13.2" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} From edddb63e4c5da227f8084b7b7f494873f4e38711 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Mar 2023 10:06:31 +0000 Subject: [PATCH 502/611] chore(deps): update dependency google-cloud-datastore to v2.14.0 (#423) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 21c16a12ab52..03b7d2f46454 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.13.2 \ No newline at end of file +google-cloud-datastore==2.14.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 51cac2bed779..46e66f8d1bdb 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.13.2 +google-cloud-datastore==2.14.0 From ccebe3d2583a41cc6dc02723826b04e726947db3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 3 Mar 2023 21:07:00 +0000 Subject: [PATCH 503/611] chore(deps): update dependency pytest to v7.2.2 (#424) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index c7b5651ff672..aa58f1dbefd9 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.2.1 +pytest==7.2.2 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index dd3c7330bb99..b705adb655e6 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.1 \ No newline at end of file +pytest==7.2.2 \ No newline at end of file From a2c3cbec53915ec1572dd4fb53ee20881ca7b189 Mon Sep 17 00:00:00 2001 From: Mariatta Wijaya Date: Thu, 9 Mar 2023 08:44:38 -0800 Subject: [PATCH 504/611] feat: OR Query implementation (#418) Introduce new Filter classes: - PropertyFilter - And - Or Add "filter" keyword args to "Query.add_filter()" UserWarning is now emitted when using "add_filter()" without keyword args --- .../google/cloud/datastore/query.py | 242 ++++++++++-- .../tests/system/index.yaml | 10 + .../tests/system/test_query.py | 117 +++++- .../tests/unit/test_query.py | 365 ++++++++++++++++-- 4 files changed, 660 insertions(+), 74 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py 
b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 5907f3c1066c..2659ebc0fb18 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -15,15 +15,21 @@ """Create / interact with Google Cloud Datastore queries.""" import base64 +import warnings + from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list + from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key +import abc +from abc import ABC + _NOT_FINISHED = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED _NO_MORE_RESULTS = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS @@ -34,6 +40,100 @@ query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_CURSOR, ) +KEY_PROPERTY_NAME = "__key__" + + +class BaseFilter(ABC): + """Base class for Filters""" + + @abc.abstractmethod + def build_pb(self, container_pb=None): + """Build the protobuf representation based on values in the Filter.""" + + +class PropertyFilter(BaseFilter): + """Class representation of a Property Filter""" + + def __init__(self, property_name, operator, value): + if property_name == KEY_PROPERTY_NAME and not isinstance(value, Key): + raise ValueError('Invalid key: "%s"' % value) + if Query.OPERATORS.get(operator) is None: + error_message = 'Invalid expression: "%s"' % (operator,) + choices_message = "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." + raise ValueError(error_message, choices_message) + self.property_name = property_name + self.operator = operator + self.value = value + + def build_pb(self, container_pb=None): + """Build the protobuf representation based on values in the Property Filter.""" + container_pb.op = Query.OPERATORS.get(self.operator) + container_pb.property.name = self.property_name + if self.property_name == KEY_PROPERTY_NAME: + key_pb = self.value.to_protobuf() + container_pb.value.key_value.CopyFrom(key_pb._pb) + else: + helpers._set_protobuf_value(container_pb.value, self.value) + return container_pb + + def __repr__(self): + return f"<{self.property_name} {self.operator} '{self.value}'>" + + +class BaseCompositeFilter(BaseFilter): + """Base class for a Composite Filter. 
(either OR or AND).""" + + def __init__( + self, + operation=query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED, + filters=None, + ): + self.operation = operation + if filters is None: + self.filters = [] + else: + self.filters = filters + + def __repr__(self): + repr = f"op: {self.operation}\nFilters:" + for filter in self.filters: + repr += f"\n\t{filter}" + return repr + + def build_pb(self, container_pb=None): + """Build the protobuf representation based on values in the Composite Filter.""" + container_pb.op = self.operation + for filter in self.filters: + if isinstance(filter, PropertyFilter): + child_pb = container_pb.filters.add().property_filter + elif isinstance(filter, BaseCompositeFilter): + child_pb = container_pb.filters.add().composite_filter + else: + # unpack to legacy filter + property_name, operator, value = filter + filter = PropertyFilter(property_name, operator, value) + child_pb = container_pb.filters.add().property_filter + filter.build_pb(container_pb=child_pb) + return container_pb + + +class Or(BaseCompositeFilter): + """Class representation of an OR Filter.""" + + def __init__(self, filters): + super().__init__( + operation=query_pb2.CompositeFilter.Operator.OR, filters=filters + ) + + +class And(BaseCompositeFilter): + """Class representation of an AND Filter.""" + + def __init__(self, filters): + super().__init__( + operation=query_pb2.CompositeFilter.Operator.AND, filters=filters + ) + class Query(object): """A Query against the Cloud Datastore. @@ -107,13 +207,31 @@ def __init__( self._client = client self._kind = kind - self._project = project or client.project - self._namespace = namespace or client.namespace + + if project: + self._project = project + elif hasattr(client, "project"): + self._project = client.project + else: + self._project = None + + if namespace: + self._namespace = namespace + elif hasattr(client, "namespace"): + self._namespace = client.namespace + else: + self._namespace = None + self._ancestor = ancestor self._filters = [] + # Verify filters passed in. - for property_name, operator, value in filters: - self.add_filter(property_name, operator, value) + for filter in filters: + if isinstance(filter, BaseFilter): + self.add_filter(filter=filter) + else: + property_name, operator, value = filter + self.add_filter(property_name, operator, value) self._projection = _ensure_tuple_or_list("projection", projection) self._order = _ensure_tuple_or_list("order", order) self._distinct_on = _ensure_tuple_or_list("distinct_on", distinct_on) @@ -209,30 +327,61 @@ def filters(self): """ return self._filters[:] - def add_filter(self, property_name, operator, value): + def add_filter( + self, + property_name=None, + operator=None, + value=None, + *, + filter=None, + ): """Filter the query based on a property name, operator and a value. Expressions take the form of:: - .add_filter('', '', ) + .add_filter( + filter=PropertyFilter('', '', ) + ) where property is a property stored on the entity in the datastore and operator is one of ``OPERATORS`` (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``, ``!=``, ``IN``, ``NOT_IN``): + Both AND and OR operations are supported by passing in a `CompositeFilter` object to the `filter` parameter:: + + .add_filter( + filter=And( + [ + PropertyFilter('', '', ), + PropertyFilter('', '', ) + + ] + ) + ) + + .add_filter( + filter=Or( + [ + PropertyFilter('', '', ), + PropertyFilter('', '', ) + ] + ) + ) + .. 
testsetup:: query-filter
 
             import uuid
 
             from google.cloud import datastore
+            from google.cloud.datastore.query import PropertyFilter
 
             client = datastore.Client()
 
         .. doctest:: query-filter
 
             >>> query = client.query(kind='Person')
-            >>> query = query.add_filter('name', '=', 'James')
-            >>> query = query.add_filter('age', '>', 50)
+            >>> query = query.add_filter(filter=PropertyFilter('name', '=', 'James'))
+            >>> query = query.add_filter(filter=PropertyFilter('age', '>', 50))
 
         :type property_name: str
         :param property_name: A property name.
@@ -246,6 +395,9 @@ def add_filter(self, property_name, operator, value):
             :class:`google.cloud.datastore.key.Key`
         :param value: The value to filter on.
 
+        :type filter: :class:`CompositeFilter`, :class:`PropertyFilter`
+        :param filter: An instance of a `BaseFilter`, either a `CompositeFilter` or `PropertyFilter`.
+
         :rtype: :class:`~google.cloud.datastore.query.Query`
         :returns: A query object.
 
@@ -253,15 +405,39 @@ def add_filter(self, property_name, operator, value):
             specified values, or if a filter names ``'__key__'`` but passes an
             invalid value (a key is required).
         """
-        if self.OPERATORS.get(operator) is None:
-            error_message = 'Invalid expression: "%s"' % (operator,)
-            choices_message = "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN."
-            raise ValueError(error_message, choices_message)
+        if isinstance(property_name, PropertyFilter):
+            raise ValueError(
+                "PropertyFilter object must be passed using keyword argument 'filter'"
+            )
+        if isinstance(property_name, BaseCompositeFilter):
+            raise ValueError(
+                "'Or' and 'And' objects must be passed using keyword argument 'filter'"
+            )
 
-        if property_name == "__key__" and not isinstance(value, Key):
-            raise ValueError('Invalid key: "%s"' % value)
+        if property_name is not None and operator is not None:
+            if filter is not None:
+                raise ValueError(
+                    "Can't pass in both the positional arguments and 'filter' at the same time"
+                )
+
+            if property_name == KEY_PROPERTY_NAME and not isinstance(value, Key):
+                raise ValueError('Invalid key: "%s"' % value)
+
+            if self.OPERATORS.get(operator) is None:
+                error_message = 'Invalid expression: "%s"' % (operator,)
+                choices_message = "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN."
+                raise ValueError(error_message, choices_message)
+
+            warnings.warn(
+                "Detected filter using positional arguments. Prefer using the 'filter' keyword argument instead.",
+                UserWarning,
+                stacklevel=2,
+            )
+            self._filters.append((property_name, operator, value))
+
+        if isinstance(filter, BaseFilter):
+            self._filters.append(filter)
 
-        self._filters.append((property_name, operator, value))
         return self
 
     @property
@@ -287,7 +463,7 @@ def projection(self, projection):
 
     def keys_only(self):
         """Set the projection to include only keys."""
-        self._projection[:] = ["__key__"]
+        self._projection[:] = [KEY_PROPERTY_NAME]
 
     def key_filter(self, key, operator="="):
         """Filter on a key.
@@ -299,7 +475,7 @@ def key_filter(self, key, operator="="):
         :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``,
             ``>=``, ``!=``, ``IN``, ``NOT_IN``. Defaults to ``=``. 
""" - self.add_filter("__key__", operator, key) + self.add_filter(KEY_PROPERTY_NAME, operator, key) @property def order(self): @@ -368,7 +544,7 @@ def fetch( import uuid from google.cloud import datastore - + from google.cloud.datastore.query import PropertyFilter unique = str(uuid.uuid4())[0:8] client = datastore.Client(namespace='ns{}'.format(unique)) @@ -383,7 +559,7 @@ def fetch( >>> bobby['name'] = 'Bobby' >>> client.put_multi([andy, sally, bobby]) >>> query = client.query(kind='Person') - >>> result = list(query.add_filter('name', '=', 'Sally').fetch()) + >>> result = list(query.add_filter(filter=PropertyFilter('name', '=', 'Sally')).fetch()) >>> result [] @@ -688,30 +864,26 @@ def _pb_from_query(query): composite_filter = pb.filter.composite_filter composite_filter.op = query_pb2.CompositeFilter.Operator.AND + for filter in query.filters: + if isinstance(filter, BaseCompositeFilter): + pb_to_add = pb.filter.composite_filter.filters._pb.add().composite_filter + elif isinstance(filter, PropertyFilter): + pb_to_add = pb.filter.composite_filter.filters._pb.add().property_filter + else: + property_name, operator, value = filter + filter = PropertyFilter(property_name, operator, value) + pb_to_add = pb.filter.composite_filter.filters._pb.add().property_filter + filter.build_pb(container_pb=pb_to_add) + if query.ancestor: ancestor_pb = query.ancestor.to_protobuf() # Filter on __key__ HAS_ANCESTOR == ancestor. ancestor_filter = composite_filter.filters._pb.add().property_filter - ancestor_filter.property.name = "__key__" + ancestor_filter.property.name = KEY_PROPERTY_NAME ancestor_filter.op = query_pb2.PropertyFilter.Operator.HAS_ANCESTOR ancestor_filter.value.key_value.CopyFrom(ancestor_pb._pb) - for property_name, operator, value in query.filters: - pb_op_enum = query.OPERATORS.get(operator) - - # Add the specific filter - property_filter = composite_filter.filters._pb.add().property_filter - property_filter.property.name = property_name - property_filter.op = pb_op_enum - - # Set the value to filter on based on the type. - if property_name == "__key__": - key_pb = value.to_protobuf() - property_filter.value.key_value.CopyFrom(key_pb._pb) - else: - helpers._set_protobuf_value(property_filter.value, value) - if not composite_filter.filters: pb._pb.ClearField("filter") diff --git a/packages/google-cloud-datastore/tests/system/index.yaml b/packages/google-cloud-datastore/tests/system/index.yaml index 5a2d2b1a8bc9..08a50d094de4 100644 --- a/packages/google-cloud-datastore/tests/system/index.yaml +++ b/packages/google-cloud-datastore/tests/system/index.yaml @@ -21,3 +21,13 @@ indexes: properties: - name: name - name: family + +- kind: Character + properties: + - name: alive + - name: appearances + +- kind: Character + properties: + - name: Character + - name: appearances \ No newline at end of file diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index 499bc507a5a3..6b26629fdddb 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -21,6 +21,8 @@ from .utils import populate_datastore from . 
import _helpers +from google.cloud.datastore.query import PropertyFilter, And, Or + retry_503 = RetryErrors(exceptions.ServiceUnavailable) @@ -101,7 +103,7 @@ def test_query_w_limit_paging(ancestor_query): def test_query_w_simple_filter(ancestor_query): query = ancestor_query - query.add_filter("appearances", ">=", 20) + query.add_filter(filter=PropertyFilter("appearances", ">=", 20)) expected_matches = 6 # We expect 6, but allow the query to get 1 extra. @@ -112,8 +114,8 @@ def test_query_w_simple_filter(ancestor_query): def test_query_w_multiple_filters(ancestor_query): query = ancestor_query - query.add_filter("appearances", ">=", 26) - query = query.add_filter("family", "=", "Stark") + query.add_filter(filter=PropertyFilter("appearances", ">=", 26)) + query = query.add_filter(filter=PropertyFilter("family", "=", "Stark")) expected_matches = 4 # We expect 4, but allow the query to get 1 extra. @@ -348,10 +350,115 @@ def large_query(large_query_client): ) def test_large_query(large_query, limit, offset, expected): page_query = large_query - page_query.add_filter("family", "=", "Stark") - page_query.add_filter("alive", "=", False) + page_query.add_filter(filter=PropertyFilter("family", "=", "Stark")) + page_query.add_filter(filter=PropertyFilter("alive", "=", False)) iterator = page_query.fetch(limit=limit, offset=offset) entities = [e for e in iterator] assert len(entities) == expected + + +def test_query_add_property_filter(ancestor_query): + query = ancestor_query + + query.add_filter(filter=PropertyFilter("appearances", ">=", 26)) + expected_matches = 4 + + entities = _do_fetch(query, limit=expected_matches + 1) + + assert len(entities) == expected_matches + for e in entities: + assert e["appearances"] >= 26 + + +def test_query_and_composite_filter(ancestor_query): + query = ancestor_query + + query.add_filter( + filter=And( + [ + PropertyFilter("family", "=", "Stark"), + PropertyFilter("name", "=", "Jon Snow"), + ] + ) + ) + expected_matches = 1 + + entities = _do_fetch(query) + + assert len(entities) == expected_matches + assert entities[0]["family"] == "Stark" + assert entities[0]["name"] == "Jon Snow" + + +def test_query_or_composite_filter(ancestor_query): + query = ancestor_query + + # name = Arya or name = Jon Snow + query.add_filter( + filter=Or( + [ + PropertyFilter("name", "=", "Arya"), + PropertyFilter("name", "=", "Jon Snow"), + ] + ) + ) + expected_matches = 2 + + entities = _do_fetch(query) + + assert len(entities) == expected_matches + + assert entities[0]["name"] == "Arya" + assert entities[1]["name"] == "Jon Snow" + + +def test_query_add_filters(ancestor_query): + query = ancestor_query + + # family = Stark AND name = Jon Snow + query.add_filter(filter=PropertyFilter("family", "=", "Stark")) + query.add_filter(filter=PropertyFilter("name", "=", "Jon Snow")) + + expected_matches = 1 + + entities = _do_fetch(query) + + assert len(entities) == expected_matches + assert entities[0]["family"] == "Stark" + assert entities[0]["name"] == "Jon Snow" + + +def test_query_add_complex_filters(ancestor_query): + query = ancestor_query + + # (alive = True OR appearances >= 26) AND (family = Stark) + query.add_filter( + filter=( + Or( + [ + PropertyFilter("alive", "=", True), + PropertyFilter("appearances", ">=", 26), + ] + ) + ) + ) + query.add_filter(filter=PropertyFilter("family", "IN", ["Stark"])) + + entities = _do_fetch(query) + + alive_count = 0 + appearance_count = 0 + stark_family_count = 0 + for e in entities: + if e["appearances"] >= 26: + appearance_count += 1 + if 
e["alive"] is True: + alive_count += 1 + if "Stark" in e["family"]: + stark_family_count += 1 + + assert alive_count == 4 + assert appearance_count == 4 + assert stark_family_count == 5 diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index b473a8c71b1c..f94a98984f89 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -16,6 +16,15 @@ import mock import pytest + +from google.cloud.datastore.query import ( + Query, + PropertyFilter, + And, + Or, + BaseCompositeFilter, +) + _PROJECT = "PROJECT" @@ -33,7 +42,16 @@ def test_query_ctor_defaults(): assert query.distinct_on == [] -def test_query_ctor_explicit(): +@pytest.mark.parametrize( + "filters", + [ + [("foo", "=", "Qux"), ("bar", "<", 17)], + [PropertyFilter("foo", "=", "Qux"), PropertyFilter("bar", "<", 17)], + [And([PropertyFilter("foo", "=", "Qux"), PropertyFilter("bar", "<", 17)])], + [Or([PropertyFilter("foo", "=", "Qux"), PropertyFilter("bar", "<", 17)])], + ], +) +def test_query_ctor_explicit(filters): from google.cloud.datastore.key import Key _PROJECT = "OTHER_PROJECT" @@ -41,10 +59,11 @@ def test_query_ctor_explicit(): _NAMESPACE = "OTHER_NAMESPACE" client = _make_client() ancestor = Key("ANCESTOR", 123, project=_PROJECT) - FILTERS = [("foo", "=", "Qux"), ("bar", "<", 17)] + FILTERS = filters PROJECTION = ["foo", "bar", "baz"] ORDER = ["foo", "bar"] DISTINCT_ON = ["foo"] + query = _make_query( client, kind=_KIND, @@ -148,6 +167,17 @@ def test_query_ancestor_setter_w_key(): assert query.ancestor.path == key.path +def test_query_ancestor_setter_w_key_property_filter(): + from google.cloud.datastore.key import Key + + _NAME = "NAME" + key = Key("KIND", 123, project=_PROJECT) + query = _make_query(_make_client()) + query.add_filter(filter=PropertyFilter("name", "=", _NAME)) + query.ancestor = key + assert query.ancestor.path == key.path + + def test_query_ancestor_deleter_w_key(): from google.cloud.datastore.key import Key @@ -159,8 +189,18 @@ def test_query_ancestor_deleter_w_key(): def test_query_add_filter_setter_w_unknown_operator(): query = _make_query(_make_client()) - with pytest.raises(ValueError): + with pytest.raises(ValueError) as exc: query.add_filter("firstname", "~~", "John") + assert "Invalid expression:" in str(exc.value) + assert "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." in str(exc.value) + + +def test_query_add_property_filter_setter_w_unknown_operator(): + query = _make_query(_make_client()) + with pytest.raises(ValueError) as exc: + query.add_filter(filter=PropertyFilter("firstname", "~~", "John")) + assert "Invalid expression:" in str(exc.value) + assert "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." 
in str(exc.value) def test_query_add_filter_w_known_operator(): @@ -169,6 +209,13 @@ def test_query_add_filter_w_known_operator(): assert query.filters == [("firstname", "=", "John")] +def test_query_add_property_filter_w_known_operator(): + query = _make_query(_make_client()) + property_filter = PropertyFilter("firstname", "=", "John") + query.add_filter(filter=property_filter) + assert query.filters == [property_filter] + + def test_query_add_filter_w_all_operators(): query = _make_query(_make_client()) query.add_filter("leq_prop", "<=", "val1") @@ -190,6 +237,29 @@ def test_query_add_filter_w_all_operators(): assert query.filters[7] == ("not_in_prop", "NOT_IN", ["val13"]) +def test_query_add_property_filter_w_all_operators(): + query = _make_query(_make_client()) + filters = [ + ("leq_prop", "<=", "val1"), + ("geq_prop", ">=", "val2"), + ("lt_prop", "<", "val3"), + ("gt_prop", ">", "val4"), + ("eq_prop", "=", "val5"), + ("in_prop", "IN", ["val6"]), + ("neq_prop", "!=", "val9"), + ("not_in_prop", "NOT_IN", ["val13"]), + ] + property_filters = [PropertyFilter(*filter) for filter in filters] + + for filter in property_filters: + query.add_filter(filter=filter) + + assert len(query.filters) == 8 + + for i in range(8): + assert query.filters[i] == property_filters[i] + + def test_query_add_filter_w_known_operator_and_entity(): from google.cloud.datastore.entity import Entity @@ -201,6 +271,18 @@ def test_query_add_filter_w_known_operator_and_entity(): assert query.filters == [("other", "=", other)] +def test_query_add_property_filter_w_known_operator_and_entity(): + from google.cloud.datastore.entity import Entity + + query = _make_query(_make_client()) + other = Entity() + other["firstname"] = "John" + other["lastname"] = "Smith" + property_filter = PropertyFilter("other", "=", other) + query.add_filter(filter=property_filter) + assert query.filters == [property_filter] + + def test_query_add_filter_w_whitespace_property_name(): query = _make_query(_make_client()) PROPERTY_NAME = " property with lots of space " @@ -208,6 +290,14 @@ def test_query_add_filter_w_whitespace_property_name(): assert query.filters == [(PROPERTY_NAME, "=", "John")] +def test_query_add_property_filter_w_whitespace_property_name(): + query = _make_query(_make_client()) + PROPERTY_NAME = " property with lots of space " + property_filter = PropertyFilter(PROPERTY_NAME, "=", "John") + query.add_filter(filter=property_filter) + assert query.filters == [property_filter] + + def test_query_add_filter___key__valid_key(): from google.cloud.datastore.key import Key @@ -217,6 +307,16 @@ def test_query_add_filter___key__valid_key(): assert query.filters == [("__key__", "=", key)] +def test_query_add_property_filter___key__valid_key(): + from google.cloud.datastore.key import Key + + query = _make_query(_make_client()) + key = Key("Foo", project=_PROJECT) + property_filter = PropertyFilter("__key__", "=", key) + query.add_filter(filter=property_filter) + assert query.filters == [property_filter] + + def test_query_add_filter_return_query_obj(): from google.cloud.datastore.query import Query @@ -226,6 +326,81 @@ def test_query_add_filter_return_query_obj(): assert query_obj.filters == [("firstname", "=", "John")] +def test_query_add_property_filter_without_keyword_argument(): + + query = _make_query(_make_client()) + property_filter = PropertyFilter("firstname", "=", "John") + with pytest.raises(ValueError) as exc: + query.add_filter(property_filter) + + assert ( + "PropertyFilter object must be passed using keyword argument 
'filter'" + in str(exc.value) + ) + + +def test_query_add_composite_filter_without_keyword_argument(): + + query = _make_query(_make_client()) + and_filter = And(["firstname", "=", "John"]) + with pytest.raises(ValueError) as exc: + query.add_filter(and_filter) + + assert ( + "'Or' and 'And' objects must be passed using keyword argument 'filter'" + in str(exc.value) + ) + + or_filter = Or(["firstname", "=", "John"]) + with pytest.raises(ValueError) as exc: + query.add_filter(or_filter) + + assert ( + "'Or' and 'And' objects must be passed using keyword argument 'filter'" + in str(exc.value) + ) + + +def test_query_positional_args_and_property_filter(): + + query = _make_query(_make_client()) + with pytest.raises(ValueError) as exc: + query.add_filter("firstname", "=", "John", filter=("name", "=", "Blabla")) + + assert ( + "Can't pass in both the positional arguments and 'filter' at the same time" + in str(exc.value) + ) + + +def test_query_positional_args_and_composite_filter(): + + query = _make_query(_make_client()) + and_filter = And(["firstname", "=", "John"]) + with pytest.raises(ValueError) as exc: + query.add_filter("firstname", "=", "John", filter=and_filter) + + assert ( + "Can't pass in both the positional arguments and 'filter' at the same time" + in str(exc.value) + ) + + +def test_query_add_filter_with_positional_args_raises_user_warning(): + query = _make_query(_make_client()) + with pytest.warns( + UserWarning, + match="Detected filter using positional arguments", + ): + query.add_filter("firstname", "=", "John") + + with pytest.warns( + UserWarning, + match="Detected filter using positional arguments", + ): + _make_stub_query(filters=[("name", "=", "John")]) + + def test_query_filter___key__not_equal_operator(): from google.cloud.datastore.key import Key @@ -235,10 +410,28 @@ def test_query_filter___key__not_equal_operator(): assert query.filters == [("__key__", "<", key)] +def test_query_property_filter___key__not_equal_operator(): + from google.cloud.datastore.key import Key + + key = Key("Foo", project=_PROJECT) + query = _make_query(_make_client()) + property_filter = PropertyFilter("__key__", "<", key) + query.add_filter(filter=property_filter) + assert query.filters == [property_filter] + + def test_query_filter___key__invalid_value(): query = _make_query(_make_client()) - with pytest.raises(ValueError): + with pytest.raises(ValueError) as exc: query.add_filter("__key__", "=", None) + assert "Invalid key:" in str(exc.value) + + +def test_query_property_filter___key__invalid_value(): + query = _make_query(_make_client()) + with pytest.raises(ValueError) as exc: + query.add_filter(filter=PropertyFilter("__key__", "=", None)) + assert "Invalid key:" in str(exc.value) def test_query_projection_setter_empty(): @@ -721,7 +914,7 @@ def test_pb_from_query_empty(): from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.query import _pb_from_query - pb = _pb_from_query(_Query()) + pb = _pb_from_query(_make_stub_query()) assert list(pb.projection) == [] assert list(pb.kind) == [] assert list(pb.order) == [] @@ -739,14 +932,14 @@ def test_pb_from_query_empty(): def test_pb_from_query_projection(): from google.cloud.datastore.query import _pb_from_query - pb = _pb_from_query(_Query(projection=["a", "b", "c"])) + pb = _pb_from_query(_make_stub_query(projection=["a", "b", "c"])) assert [item.property.name for item in pb.projection] == ["a", "b", "c"] def test_pb_from_query_kind(): from google.cloud.datastore.query import _pb_from_query - pb = 
_pb_from_query(_Query(kind="KIND")) + pb = _pb_from_query(_make_stub_query(kind="KIND")) assert [item.name for item in pb.kind] == ["KIND"] @@ -756,7 +949,7 @@ def test_pb_from_query_ancestor(): from google.cloud.datastore.query import _pb_from_query ancestor = Key("Ancestor", 123, project="PROJECT") - pb = _pb_from_query(_Query(ancestor=ancestor)) + pb = _pb_from_query(_make_stub_query(ancestor=ancestor)) cfilter = pb.filter.composite_filter assert cfilter.op == query_pb2.CompositeFilter.Operator.AND assert len(cfilter.filters) == 1 @@ -770,7 +963,7 @@ def test_pb_from_query_filter(): from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.query import _pb_from_query - query = _Query(filters=[("name", "=", "John")]) + query = _make_stub_query(filters=[("name", "=", "John")]) query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} pb = _pb_from_query(query) cfilter = pb.filter.composite_filter @@ -787,7 +980,7 @@ def test_pb_from_query_filter_key(): from google.cloud.datastore.query import _pb_from_query key = Key("Kind", 123, project="PROJECT") - query = _Query(filters=[("__key__", "=", key)]) + query = _make_stub_query(filters=[("__key__", "=", key)]) query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} pb = _pb_from_query(query) cfilter = pb.filter.composite_filter @@ -799,11 +992,114 @@ def test_pb_from_query_filter_key(): assert pfilter.value.key_value == key_pb +def test_pb_from_complex_filter(): + from google.cloud.datastore_v1.types import query as query_pb2 + from google.cloud.datastore.query import _pb_from_query + + query = _make_stub_query( + filters=[ + ("name", "=", "John"), + And( + [ + PropertyFilter("category", "=", "Grocery"), + PropertyFilter("price", ">", "100"), + ] + ), + Or( + [ + PropertyFilter("category", "=", "Stationery"), + PropertyFilter("price", "<", "50"), + ] + ), + PropertyFilter("name", "=", "Jana"), + ] + ) + query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} + pb = _pb_from_query(query) + filter = pb.filter.composite_filter + + assert filter.op == query_pb2.CompositeFilter.Operator.AND + assert len(filter.filters) == 4 + + filter_1 = filter.filters[0].property_filter + assert filter_1.property.name == "name" + assert filter_1.value.string_value == "John" + assert filter_1.op == Query.OPERATORS.get("=") + + filter_2 = filter.filters[1].composite_filter + assert len(filter_2.filters) == 2 + assert filter_2.op == query_pb2.CompositeFilter.Operator.AND + + filter_2_1 = filter_2.filters[0].property_filter + assert filter_2_1.property.name == "category" + assert filter_2_1.op == Query.OPERATORS.get("=") + assert filter_2_1.value.string_value == "Grocery" + + filter_2_2 = filter_2.filters[1].property_filter + assert filter_2_2.property.name == "price" + assert filter_2_2.op == Query.OPERATORS.get(">") + assert filter_2_2.value.string_value == "100" + + filter_3 = filter.filters[2].composite_filter + assert len(filter_3.filters) == 2 + assert filter_3.op == query_pb2.CompositeFilter.Operator.OR + + filter_3_1 = filter_3.filters[0].property_filter + assert filter_3_1.property.name == "category" + assert filter_3_1.op == Query.OPERATORS.get("=") + assert filter_3_1.value.string_value == "Stationery" + + filter_3_2 = filter_3.filters[1].property_filter + assert filter_3_2.property.name == "price" + assert filter_3_2.op == Query.OPERATORS.get("<") + assert filter_3_2.value.string_value == "50" + + filter_4 = filter.filters[3].property_filter + assert filter_4.property.name == "name" + assert 
filter_4.value.string_value == "Jana" + assert filter_4.op == Query.OPERATORS.get("=") + + +def test_build_pb_for_and(): + + and_filter = And( + [ + ("name", "=", "John"), + And( + [ + PropertyFilter("category", "=", "Grocery"), + PropertyFilter("price", ">", "100"), + ] + ), + PropertyFilter("category", "=", "Grocery"), + ] + ) + from google.cloud.datastore_v1.types import query as query_pb2 + + container_pb = ( + query_pb2.Filter().composite_filter.filters._pb.add().composite_filter + ) + pb = and_filter.build_pb(container_pb=container_pb) + + assert pb.op == query_pb2.CompositeFilter.Operator.AND + assert len(pb.filters) == 3 + + +def test_base_composite_filter(): + from google.cloud.datastore_v1.types import query as query_pb2 + + comp_filter = BaseCompositeFilter() + assert len(comp_filter.filters) == 0 + assert ( + comp_filter.operation == query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED + ) + + def test_pb_from_query_order(): from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.query import _pb_from_query - pb = _pb_from_query(_Query(order=["a", "-b", "c"])) + pb = _pb_from_query(_make_stub_query(order=["a", "-b", "c"])) assert [item.property.name for item in pb.order] == ["a", "b", "c"] expected_directions = [ query_pb2.PropertyOrder.Direction.ASCENDING, @@ -816,32 +1112,33 @@ def test_pb_from_query_order(): def test_pb_from_query_distinct_on(): from google.cloud.datastore.query import _pb_from_query - pb = _pb_from_query(_Query(distinct_on=["a", "b", "c"])) + pb = _pb_from_query(_make_stub_query(distinct_on=["a", "b", "c"])) assert [item.name for item in pb.distinct_on] == ["a", "b", "c"] -class _Query(object): - def __init__( - self, - client=object(), - kind=None, - project=None, - namespace=None, - ancestor=None, - filters=(), - projection=(), - order=(), - distinct_on=(), - ): - self._client = client - self.kind = kind - self.project = project - self.namespace = namespace - self.ancestor = ancestor - self.filters = filters - self.projection = projection - self.order = order - self.distinct_on = distinct_on +def _make_stub_query( + client=object(), + kind=None, + project=None, + namespace=None, + ancestor=None, + filters=(), + projection=(), + order=(), + distinct_on=(), +): + query = Query( + client, + kind=kind, + project=project, + namespace=namespace, + ancestor=ancestor, + filters=filters, + projection=projection, + order=order, + distinct_on=distinct_on, + ) + return query class _Client(object): From 4dc7592ee5ca7d53c668c5ecec3a6126b74d2546 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Mar 2023 09:00:13 -0800 Subject: [PATCH 505/611] chore(main): release 2.15.0 (#425) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index 851649e8452b..a73bb826a85b 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ 
b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.14.0" + ".": "2.15.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index f0dd6b140019..0b91d61202df 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.15.0](https://github.com/googleapis/python-datastore/compare/v2.14.0...v2.15.0) (2023-03-09) + + +### Features + +* OR Query implementation ([#418](https://github.com/googleapis/python-datastore/issues/418)) ([3256951](https://github.com/googleapis/python-datastore/commit/325695197497f9da4976fe9220d1d72114bdc12b)) + ## [2.14.0](https://github.com/googleapis/python-datastore/compare/v2.13.2...v2.14.0) (2023-02-28) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index fe257fa248dc..7431d8e5d620 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index ba8b4e8af8c9..a8381fff6ee7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.14.0" +__version__ = "2.15.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 8be002907dd0..2788e5e55993 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index f485e26eb50b..8af4dfed9ecf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
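The 2.15.0 changelog entry above records the OR Query implementation (#418) that the test additions at the start of this series exercise. A minimal usage sketch of that surface, assuming a default client and an illustrative `Task` kind (none of the entity or property names below come from the patch itself):

    from google.cloud import datastore
    from google.cloud.datastore.query import And, Or, PropertyFilter

    client = datastore.Client()
    query = client.query(kind="Task")

    # Single property filters must be passed via the 'filter' keyword;
    # bare positional tuples still work but now emit a UserWarning.
    query.add_filter(filter=PropertyFilter("done", "=", False))

    # And/Or combine PropertyFilter objects into one composite filter.
    query.add_filter(
        filter=Or(
            [
                PropertyFilter("priority", ">=", 4),
                PropertyFilter("category", "=", "work"),
            ]
        )
    )
    results = list(query.fetch())
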
-__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index f485e26eb50b..8af4dfed9ecf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.15.0" # {x-release-please-version} From 00831f68b5b0c8c58bd95b2a88b42d546512816d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 08:34:02 -0400 Subject: [PATCH 506/611] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#427) Source-Link: https://github.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-datastore/.kokoro/requirements.in | 2 +- .../.kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 5fc5daa31783..b8edda51cf46 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.in b/packages/google-cloud-datastore/.kokoro/requirements.in index 882178ce6001..ec867d9fd65a 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.in +++ b/packages/google-cloud-datastore/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index fa99c12908f0..66a2172a76a8 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ + --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via nox pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 7b6a983cdcf0fd062eedd5a7634d4da71b4904ea Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 17 Mar 2023 00:29:05 +0000 Subject: [PATCH 507/611] chore(deps): update dependency google-cloud-datastore to v2.15.0 (#426) Co-authored-by: Mariatta Wijaya --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 03b7d2f46454..9492cd23dd5b 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.14.0 \ No newline at end of file +google-cloud-datastore==2.15.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 46e66f8d1bdb..b276b14e235e 100644 --- 
a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.14.0 +google-cloud-datastore==2.15.0 From a9e8695ba31162eb6e1ca0a08e5446f651059399 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 09:26:12 -0400 Subject: [PATCH 508/611] docs: Fix formatting of request arg in docstring (#428) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Fix formatting of request arg in docstring chore: Update gapic-generator-python to v1.9.1 PiperOrigin-RevId: 518604533 Source-Link: https://github.com/googleapis/googleapis/commit/8a085aeddfa010af5bcef090827aac5255383d7e Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2ab4b0a0ae2907e812c209198a74e0898afcb04 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJhYjRiMGEwYWUyOTA3ZTgxMmMyMDkxOThhNzRlMDg5OGFmY2IwNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/transports/rest.py | 6 ------ .../datastore_v1/services/datastore/transports/rest.py | 8 -------- 2 files changed, 14 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 8c6339a945d5..41aea81197e9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -576,7 +576,6 @@ def __call__( request (~.datastore_admin.CreateIndexRequest): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -663,7 +662,6 @@ def __call__( request (~.datastore_admin.DeleteIndexRequest): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -751,7 +749,6 @@ def __call__( request (~.datastore_admin.ExportEntitiesRequest): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -839,7 +836,6 @@ def __call__( request (~.datastore_admin.GetIndexRequest): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -926,7 +922,6 @@ def __call__( request (~.datastore_admin.ImportEntitiesRequest): The request object. 
The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1014,7 +1009,6 @@ def __call__( request (~.datastore_admin.ListIndexesRequest): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index 842a2f8b6d1c..e1bced6ef0c2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -528,7 +528,6 @@ def __call__( request (~.datastore.AllocateIdsRequest): The request object. The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -627,7 +626,6 @@ def __call__( request (~.datastore.BeginTransactionRequest): The request object. The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -728,7 +726,6 @@ def __call__( request (~.datastore.CommitRequest): The request object. The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -827,7 +824,6 @@ def __call__( request (~.datastore.LookupRequest): The request object. The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -926,7 +922,6 @@ def __call__( request (~.datastore.ReserveIdsRequest): The request object. The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1025,7 +1020,6 @@ def __call__( request (~.datastore.RollbackRequest): The request object. The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1125,7 +1119,6 @@ def __call__( request (~.datastore.RunAggregationQueryRequest): The request object. The request for [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1226,7 +1219,6 @@ def __call__( request (~.datastore.RunQueryRequest): The request object. The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. 
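Every hunk above deletes the same stray blank line after the request-object description; that blank line apparently detached the ``retry`` and ``timeout`` entries from the ``Args:`` field list, so they rendered as loose text instead of arguments. A schematic of the corrected shape (method body and surrounding code abbreviated for illustration):

    def __call__(self, request, retry=None, timeout=None):
        """Call the RunQuery method over REST.

        Args:
            request (~.datastore.RunQueryRequest):
                The request object. The request for
                [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery].
            retry (google.api_core.retry.Retry): Designation of what
                errors, if any, should be retried.
            timeout (float): The timeout for this request.
        """
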
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. From 085677227fcb6608611fe75f556278ce76724d5f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 24 Mar 2023 07:25:58 -0700 Subject: [PATCH 509/611] docs: improve query API documentation (#430) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add SUM/AVG aggregation query support in the preview branch and improve query API documentation PiperOrigin-RevId: 518940761 Source-Link: https://github.com/googleapis/googleapis/commit/b1efb3270a3639831c35ccd7552c2bced807279c Source-Link: https://github.com/googleapis/googleapis-gen/commit/f7c205414a5c6579438b5806d3dc5f20d09ef83c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjdjMjA1NDE0YTVjNjU3OTQzOGI1ODA2ZDNkYzVmMjBkMDllZjgzYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/datastore_v1/types/entity.py | 4 ++-- .../google/cloud/datastore_v1/types/query.py | 22 ++++++++++++------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index ec6564da5e70..14fc43e61cfa 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -373,8 +373,8 @@ class Entity(proto.Message): The entity's properties. The map's keys are property names. A property name matching regex ``__.*__`` is reserved. A reserved property name is forbidden in certain documented - contexts. The name must not contain more than 500 - characters. The name cannot be ``""``. + contexts. The map keys, represented as UTF-8, must not + exceed 1,500 bytes and cannot be empty. """ key: "Key" = proto.Field( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 17e1cca502d8..afa05d19b82b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -146,10 +146,16 @@ class Query(proto.Message): The order to apply to the query results (if empty, order is unspecified). distinct_on (MutableSequence[google.cloud.datastore_v1.types.PropertyReference]): - The properties to make distinct. The query - results will contain the first result for each - distinct combination of values for the given - properties (if empty, all results are returned). + The properties to make distinct. The query results will + contain the first result for each distinct combination of + values for the given properties (if empty, all results are + returned). + + Requires: + + - If ``order`` is specified, the set of distinct on + properties must appear before the non-distinct on + properties in ``order``. start_cursor (bytes): A starting point for the query results. Query cursors are returned in query result batches and `can only be used to @@ -238,7 +244,7 @@ class AggregationQuery(proto.Message): """ class Aggregation(proto.Message): - r"""Defines a aggregation that produces a single result. + r"""Defines an aggregation that produces a single result. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -261,7 +267,7 @@ class Aggregation(proto.Message): COUNT_UP_TO(1) AS count_up_to_1, COUNT_UP_TO(2), COUNT_UP_TO(3) AS count_up_to_3, - COUNT_UP_TO(4) + COUNT(*) OVER ( ... ); @@ -274,7 +280,7 @@ class Aggregation(proto.Message): COUNT_UP_TO(1) AS count_up_to_1, COUNT_UP_TO(2) AS property_1, COUNT_UP_TO(3) AS count_up_to_3, - COUNT_UP_TO(4) AS property_2 + COUNT(*) AS property_2 OVER ( ... ); @@ -298,7 +304,7 @@ class Count(proto.Message): entities to count. This provides a way to set an upper bound on the number of - entities to scan, limiting latency and cost. + entities to scan, limiting latency, and cost. Unspecified is interpreted as no bound. From bad62e0cef8ee6b0e4a35a29144542ddea315ba6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 10:47:44 -0400 Subject: [PATCH 510/611] chore(main): release 2.15.1 (#429) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 8 ++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 14 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index a73bb826a85b..cef131c9ff07 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.15.0" + ".": "2.15.1" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 0b91d61202df..364bc6b7a5f6 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.15.1](https://github.com/googleapis/python-datastore/compare/v2.15.0...v2.15.1) (2023-03-24) + + +### Documentation + +* Fix formatting of request arg in docstring ([#428](https://github.com/googleapis/python-datastore/issues/428)) ([da86a02](https://github.com/googleapis/python-datastore/commit/da86a02744f02c34d9cf1c83ea20a4ca2bbde232)) +* Improve query API documentation ([#430](https://github.com/googleapis/python-datastore/issues/430)) ([915daf5](https://github.com/googleapis/python-datastore/commit/915daf5443c74953f9d531c2bf2029e4a8e87d47)) + ## [2.15.0](https://github.com/googleapis/python-datastore/compare/v2.14.0...v2.15.0) (2023-03-09) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 7431d8e5d620..5aaea4e5d326 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
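The docstring updates above cover the proto-level COUNT aggregations (including aliased ``COUNT_UP_TO`` results and the ``up_to`` bound on entities scanned). On the handwritten client surface this corresponds roughly to the following count query; the `Task` kind and alias are illustrative, and the nested loop reflects how the result iterator yields batches of aggregation results:

    from google.cloud import datastore

    client = datastore.Client()
    query = client.query(kind="Task")

    # Attach a COUNT aggregation to the query; the alias names the result.
    agg_query = client.aggregation_query(query)
    agg_query.count(alias="total")

    for result_batch in agg_query.fetch():
        for aggregation in result_batch:
            print(aggregation.alias, aggregation.value)
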
-__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index a8381fff6ee7..585af14bb331 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.0" +__version__ = "2.15.1" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 2788e5e55993..505a42f155da 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 8af4dfed9ecf..47da20c7d57f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 8af4dfed9ecf..47da20c7d57f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} From 3421d374bcf417bae43eaac6f46af8d5d6616228 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 6 Apr 2023 17:10:43 +0100 Subject: [PATCH 511/611] chore(deps): update dependency google-cloud-datastore to v2.15.1 (#431) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 9492cd23dd5b..b5827b3587d3 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.15.0 \ No newline at end of file +google-cloud-datastore==2.15.1 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index b276b14e235e..acd4bf9213f0 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.15.0 +google-cloud-datastore==2.15.1 From e4de5deb1f18970d064c3fb33fb3865394a6d8c5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 18 Apr 2023 18:01:28 +0200 Subject: [PATCH 512/611] chore(deps): update dependency pytest to v7.3.1 (#433) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index aa58f1dbefd9..18625156bb78 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.2.2 +pytest==7.3.1 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index b705adb655e6..a6510db8d39a 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.2 \ No newline at end of file +pytest==7.3.1 \ No newline at end of file From 5fdb38d7e02628c0cb07951837b2f148da089643 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 4 May 2023 11:23:14 -0400 Subject: [PATCH 513/611] docs: minor comment update for Entity message (#434) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: minor comment update for Entity message PiperOrigin-RevId: 528936266 Source-Link: https://github.com/googleapis/googleapis/commit/4f9e1a058e186df999eb85d7abec8fb092b7a145 Source-Link: https://github.com/googleapis/googleapis-gen/commit/14fce92515a1ae05b89ce2b28d9bc57ec12dcba6 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTRmY2U5MjUxNWExYWUwNWI4OWNlMmIyOGQ5YmM1N2VjMTJkY2JhNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/datastore_v1/types/entity.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 14fc43e61cfa..9fd055b73ec3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -356,10 +356,7 @@ class Value(proto.Message): class Entity(proto.Message): r"""A Datastore data object. - - An entity is limited to 1 megabyte when stored. That *roughly* - corresponds to a limit of 1 megabyte for the serialized form of this - message. + Must not exceed 1 MiB - 4 bytes. Attributes: key (google.cloud.datastore_v1.types.Key): From ef4aace94a64ed256cbce2348606b30b5fcf45f2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 May 2023 09:48:16 -0700 Subject: [PATCH 514/611] chore(main): release 2.15.2 (#435) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index cef131c9ff07..f4de5340c336 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.15.1" + ".": "2.15.2" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 364bc6b7a5f6..91c974ea3339 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.15.2](https://github.com/googleapis/python-datastore/compare/v2.15.1...v2.15.2) (2023-05-04) + + +### Documentation + +* Minor comment update for Entity message ([#434](https://github.com/googleapis/python-datastore/issues/434)) ([b31a944](https://github.com/googleapis/python-datastore/commit/b31a94455365445f5abc4eefc5edbef275ecc750)) + ## [2.15.1](https://github.com/googleapis/python-datastore/compare/v2.15.0...v2.15.1) (2023-03-24) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 5aaea4e5d326..0a2bac493697 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.15.1" # {x-release-please-version} +__version__ = "2.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 585af14bb331..31e212c0d679 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.1" +__version__ = "2.15.2" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 505a42f155da..db31fdc2ac14 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.1" # {x-release-please-version} +__version__ = "2.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 47da20c7d57f..cc1c66a7b788 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.1" # {x-release-please-version} +__version__ = "2.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 47da20c7d57f..cc1c66a7b788 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.1" # {x-release-please-version} +__version__ = "2.15.2" # {x-release-please-version} From 7a21c5e4a8349118dd42cc5f08319a26438da86f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 12:42:17 -0400 Subject: [PATCH 515/611] build(deps): bump requests from 2.28.1 to 2.31.0 in /synthtool/gcp/templates/python_library/.kokoro (#441) Source-Link: https://github.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index b8edda51cf46..32b3c486591a 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 66a2172a76a8..3b8d7ee81848 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From 35cbb89d980f2d5de954f02f7c734d8c71b43452 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Jun 2023 12:33:39 +0200 Subject: [PATCH 516/611] chore(deps): update dependency google-cloud-datastore to v2.15.2 (#438) Co-authored-by: meredithslota --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index b5827b3587d3..d0195bcdbdf6 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.15.1 \ No newline at end of file +google-cloud-datastore==2.15.2 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index acd4bf9213f0..ff812cc4f0c5 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.15.1 +google-cloud-datastore==2.15.2 From 0686509434438a59b80563157dd4154d6944f3d1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 18:26:50 -0400 Subject: [PATCH 517/611] build(deps): bump cryptography from 39.0.1 to 41.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#443) Source-Link: https://github.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 42 +++++++++---------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 32b3c486591a..02a4dedced74 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 3b8d7ee81848..c7929db6d152 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + 
--hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From ff8059703a756ab347be36b22a3326a94e17dcc0 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 12 Jun 2023 22:58:15 +0200 Subject: [PATCH 518/611] chore(deps): update dependency pytest to v7.3.2 (#445) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 18625156bb78..d700e917ed72 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.3.1 +pytest==7.3.2 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index a6510db8d39a..28706bebc1f8 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.1 \ No newline at end of file +pytest==7.3.2 \ No newline at end of file From 8eabeca31787d85e0203ef4c9ce709a857bb0439 Mon Sep 17 00:00:00 2001 From: Mariatta Date: Wed, 21 Jun 2023 03:36:39 -0700 Subject: [PATCH 519/611] feat: named database support (#439) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: named database support (#398) * feat: Add named database support * test: Use named db in system tests * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Handle the case when client doesn't have database property * fix: add custom routing headers * Fixing tests for easier merge * fixing code coverage * addressing pr comments * feat: Multi db test parametrization (#436) * Feat: Parametrize the tests for multidb support Remove "database" argument from Query and AggregationQuery constructors. Use the "database" from the client instead. Once set in the client, the "database" will be used throughout and cannot be re-set. Parametrize the tests where-ever clients are used. Use the `system-tests-named-db` in the system test. 
* Add test case for when parent database name != child database name * Update owlbot, removing the named db parameter * Reverted test fixes * fixing tests * fix code coverage * pr suggestion * address pr comments --------- Co-authored-by: Vishwaraj Anand --------- Co-authored-by: Bob "Wombat" Hogg Co-authored-by: Owl Bot Co-authored-by: Vishwaraj Anand Co-authored-by: meredithslota --- .../google/cloud/datastore/__init__.py | 6 +- .../google/cloud/datastore/_http.py | 38 +- .../google/cloud/datastore/aggregation.py | 30 +- .../google/cloud/datastore/batch.py | 31 +- .../google/cloud/datastore/client.py | 62 +- .../google/cloud/datastore/helpers.py | 14 +- .../google/cloud/datastore/key.py | 81 ++- .../google/cloud/datastore/query.py | 35 +- .../google/cloud/datastore/transaction.py | 14 +- .../google/cloud/datastore_v1/types/entity.py | 12 +- .../tests/system/_helpers.py | 8 +- .../tests/system/conftest.py | 17 +- .../tests/system/index.yaml | 16 +- .../tests/system/test_aggregation_query.py | 39 +- .../tests/system/test_allocate_reserve_ids.py | 15 +- .../tests/system/test_put.py | 30 +- .../tests/system/test_query.py | 57 +- .../tests/system/test_read_consistency.py | 10 +- .../tests/system/test_transaction.py | 12 +- .../tests/system/utils/clear_datastore.py | 19 +- .../tests/system/utils/populate_datastore.py | 17 +- .../tests/unit/test__http.py | 256 +++++--- .../tests/unit/test_aggregation.py | 97 ++- .../tests/unit/test_batch.py | 286 +++++---- .../tests/unit/test_client.py | 598 +++++++++++------- .../tests/unit/test_helpers.py | 27 +- .../tests/unit/test_key.py | 279 ++++++-- .../tests/unit/test_query.py | 429 ++++++++----- .../tests/unit/test_transaction.py | 228 ++++--- 29 files changed, 1842 insertions(+), 921 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index c188e1b9f3ca..b2b4c1724b31 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -34,9 +34,9 @@ The main concepts with this API are: - :class:`~google.cloud.datastore.client.Client` - which represents a project (string) and namespace (string) bundled with - a connection and has convenience methods for constructing objects with that - project / namespace. + which represents a project (string), database (string), and namespace + (string) bundled with a connection and has convenience methods for + constructing objects with that project/database/namespace. - :class:`~google.cloud.datastore.entity.Entity` which represents a single entity in the datastore diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index 61209e98f870..a4441c095165 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -59,6 +59,7 @@ def _request( data, base_url, client_info, + database, retry=None, timeout=None, ): @@ -84,6 +85,9 @@ def _request( :type client_info: :class:`google.api_core.client_info.ClientInfo` :param client_info: used to generate user agent. + :type database: str + :param database: The database to make the request for. 
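The ``database`` argument threaded through ``_request`` above ends up in the request routing header. The effect of the ``_update_headers`` helper that this patch adds further down can be sketched in isolation (the project and database values are illustrative):

    # Mirrors the _update_headers helper added by this patch.
    headers = {}
    project_id, database_id = "my-project", "my-db"
    headers["x-goog-request-params"] = f"project_id={project_id}"
    if database_id:
        headers[
            "x-goog-request-params"
        ] = f"project_id={project_id}&database_id={database_id}"
    assert (
        headers["x-goog-request-params"]
        == "project_id=my-project&database_id=my-db"
    )
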
+ :type retry: :class:`google.api_core.retry.Retry` :param retry: (Optional) retry policy for the request @@ -101,6 +105,7 @@ def _request( "User-Agent": user_agent, connection_module.CLIENT_INFO_HEADER: user_agent, } + _update_headers(headers, project, database) api_url = build_api_url(project, method, base_url) requester = http.request @@ -136,6 +141,7 @@ def _rpc( client_info, request_pb, response_pb_cls, + database, retry=None, timeout=None, ): @@ -165,6 +171,9 @@ def _rpc( :param response_pb_cls: The class used to unmarshall the response protobuf. + :type database: str + :param database: The database to make the request for. + :type retry: :class:`google.api_core.retry.Retry` :param retry: (Optional) retry policy for the request @@ -177,7 +186,7 @@ def _rpc( req_data = request_pb._pb.SerializeToString() kwargs = _make_retry_timeout_kwargs(retry, timeout) response = _request( - http, project, method, req_data, base_url, client_info, **kwargs + http, project, method, req_data, base_url, client_info, database, **kwargs ) return response_pb_cls.deserialize(response) @@ -236,6 +245,7 @@ def lookup(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.LookupRequest) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -245,6 +255,7 @@ def lookup(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.LookupResponse, + database_id, retry=retry, timeout=timeout, ) @@ -267,6 +278,7 @@ def run_query(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.RunQueryRequest) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -276,6 +288,7 @@ def run_query(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.RunQueryResponse, + database_id, retry=retry, timeout=timeout, ) @@ -300,6 +313,7 @@ def run_aggregation_query(self, request, retry=None, timeout=None): request, _datastore_pb2.RunAggregationQueryRequest ) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -309,6 +323,7 @@ def run_aggregation_query(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.RunAggregationQueryResponse, + database_id, retry=retry, timeout=timeout, ) @@ -331,6 +346,7 @@ def begin_transaction(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.BeginTransactionRequest) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -340,6 +356,7 @@ def begin_transaction(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.BeginTransactionResponse, + database_id, retry=retry, timeout=timeout, ) @@ -362,6 +379,7 @@ def commit(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.CommitRequest) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -371,6 +389,7 @@ def commit(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.CommitResponse, + database_id, retry=retry, timeout=timeout, ) @@ -393,6 +412,7 @@ def rollback(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.RollbackRequest) project_id = request_pb.project_id + 
database_id = request_pb.database_id return _rpc( self.client._http, @@ -402,6 +422,7 @@ def rollback(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.RollbackResponse, + database_id, retry=retry, timeout=timeout, ) @@ -424,6 +445,7 @@ def allocate_ids(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.AllocateIdsRequest) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -433,6 +455,7 @@ def allocate_ids(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.AllocateIdsResponse, + database_id, retry=retry, timeout=timeout, ) @@ -455,6 +478,7 @@ def reserve_ids(self, request, retry=None, timeout=None): """ request_pb = _make_request_pb(request, _datastore_pb2.ReserveIdsRequest) project_id = request_pb.project_id + database_id = request_pb.database_id return _rpc( self.client._http, @@ -464,6 +488,18 @@ def reserve_ids(self, request, retry=None, timeout=None): self.client._client_info, request_pb, _datastore_pb2.ReserveIdsResponse, + database_id, retry=retry, timeout=timeout, ) + + +def _update_headers(headers, project_id, database_id=None): + """Update the request headers. + Pass the project id, or optionally the database_id if provided. + """ + headers["x-goog-request-params"] = f"project_id={project_id}" + if database_id: + headers[ + "x-goog-request-params" + ] = f"project_id={project_id}&database_id={database_id}" diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py index 24d2abcc6552..421ffc9392da 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py @@ -376,6 +376,7 @@ def _next_page(self): partition_id = entity_pb2.PartitionId( project_id=self._aggregation_query.project, + database_id=self.client.database, namespace_id=self._aggregation_query.namespace, ) @@ -386,14 +387,15 @@ def _next_page(self): if self._timeout is not None: kwargs["timeout"] = self._timeout - + request = { + "project_id": self._aggregation_query.project, + "partition_id": partition_id, + "read_options": read_options, + "aggregation_query": query_pb, + } + helpers.set_database_id_to_request(request, self.client.database) response_pb = self.client._datastore_api.run_aggregation_query( - request={ - "project_id": self._aggregation_query.project, - "partition_id": partition_id, - "read_options": read_options, - "aggregation_query": query_pb, - }, + request=request, **kwargs, ) @@ -406,13 +408,15 @@ def _next_page(self): query_pb = query_pb2.AggregationQuery() query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability + request = { + "project_id": self._aggregation_query.project, + "partition_id": partition_id, + "read_options": read_options, + "aggregation_query": query_pb, + } + helpers.set_database_id_to_request(request, self.client.database) response_pb = self.client._datastore_api.run_aggregation_query( - request={ - "project_id": self._aggregation_query.project, - "partition_id": partition_id, - "read_options": read_options, - "aggregation_query": query_pb, - }, + request=request, **kwargs, ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index ba8fe6b7551b..e0dbf26dc5ff 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -122,6 +122,15 @@ def project(self): """ return self._client.project + @property + def database(self): + """Getter for database in which the batch will run. + + :rtype: :class:`str` + :returns: The database in which the batch will run. + """ + return self._client.database + @property def namespace(self): """Getter for namespace in which the batch will run. @@ -218,6 +227,9 @@ def put(self, entity): if self.project != entity.key.project: raise ValueError("Key must be from same project as batch") + if self.database != entity.key.database: + raise ValueError("Key must be from same database as batch") + if entity.key.is_partial: entity_pb = self._add_partial_key_entity_pb() self._partial_key_entities.append(entity) @@ -245,6 +257,9 @@ def delete(self, key): if self.project != key.project: raise ValueError("Key must be from same project as batch") + if self.database != key.database: + raise ValueError("Key must be from same database as batch") + key_pb = key.to_protobuf() self._add_delete_key_pb()._pb.CopyFrom(key_pb._pb) @@ -281,13 +296,17 @@ def _commit(self, retry, timeout): if timeout is not None: kwargs["timeout"] = timeout + request = { + "project_id": self.project, + "mode": mode, + "transaction": self._id, + "mutations": self._mutations, + } + + helpers.set_database_id_to_request(request, self._client.database) + commit_response_pb = self._client._datastore_api.commit( - request={ - "project_id": self.project, - "mode": mode, - "transaction": self._id, - "mutations": self._mutations, - }, + request=request, **kwargs, ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index e90a3415c5ac..fe25a0e05d77 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -126,6 +126,7 @@ def _extended_lookup( retry=None, timeout=None, read_time=None, + database=None, ): """Repeat lookup until all keys found (unless stop requested). @@ -179,6 +180,10 @@ def _extended_lookup( ``eventual==True`` or ``transaction_id``. This feature is in private preview. + :type database: str + :param database: + (Optional) Database from which to fetch data. Defaults to the (default) database. + :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not null or @@ -198,12 +203,14 @@ def _extended_lookup( read_options = helpers.get_read_options(eventual, transaction_id, read_time) while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 + request = { + "project_id": project, + "keys": key_pbs, + "read_options": read_options, + } + helpers.set_database_id_to_request(request, database) lookup_response = datastore_api.lookup( - request={ - "project_id": project, - "keys": key_pbs, - "read_options": read_options, - }, + request=request, **kwargs, ) @@ -276,6 +283,9 @@ class Client(ClientWithProject): environment variable. This parameter should be considered private, and could change in the future. + + :type database: str + :param database: (Optional) database to pass to proxied API methods. 
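Putting the batch and client changes in this hunk together: batches refuse keys from any other database, and (as the client diff just below shows) keys minted by a client inherit its database. A usage sketch under assumed ids ("my-project", "my-db", "other-db" are placeholders; anonymous credentials keep the example offline):

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import datastore

    client = datastore.Client(
        project="my-project",
        database="my-db",
        credentials=AnonymousCredentials(),  # offline; no RPCs issued here
    )
    key = client.key("Task", 1234)  # client.key() forwards the database
    assert key.database == "my-db"

    # A key minted against a different database compares unequal and would
    # be rejected by Batch.put() / Batch.delete() with
    # "Key must be from same database as batch".
    foreign = datastore.Key("Task", 1234, project="my-project", database="other-db")
    assert foreign != key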
""" SCOPE = ("https://www.googleapis.com/auth/datastore",) @@ -288,6 +298,7 @@ def __init__( credentials=None, client_info=_CLIENT_INFO, client_options=None, + database=None, _http=None, _use_grpc=None, ): @@ -311,6 +322,7 @@ def __init__( self._client_options = client_options self._batch_stack = _LocalStack() self._datastore_api_internal = None + self._database = database if _use_grpc is None: self._use_grpc = _USE_GRPC @@ -345,6 +357,11 @@ def base_url(self, value): """Setter for API base URL.""" self._base_url = value + @property + def database(self): + """Getter for database""" + return self._database + @property def _datastore_api(self): """Getter for a wrapped API object.""" @@ -557,6 +574,7 @@ def get_multi( retry=retry, timeout=timeout, read_time=read_time, + database=self.database, ) if missing is not None: @@ -739,8 +757,13 @@ def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None): kwargs = _make_retry_timeout_kwargs(retry, timeout) + request = { + "project_id": incomplete_key.project, + "keys": incomplete_key_pbs, + } + helpers.set_database_id_to_request(request, self.database) response_pb = self._datastore_api.allocate_ids( - request={"project_id": incomplete_key.project, "keys": incomplete_key_pbs}, + request=request, **kwargs, ) allocated_ids = [ @@ -753,11 +776,14 @@ def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None): def key(self, *path_args, **kwargs): """Proxy to :class:`google.cloud.datastore.key.Key`. - Passes our ``project``. + Passes our ``project`` and our ``database``. """ if "project" in kwargs: raise TypeError("Cannot pass project") kwargs["project"] = self.project + if "database" in kwargs: + raise TypeError("Cannot pass database") + kwargs["database"] = self.database if "namespace" not in kwargs: kwargs["namespace"] = self.namespace return Key(*path_args, **kwargs) @@ -963,18 +989,27 @@ def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None key_class = type(complete_key) namespace = complete_key._namespace project = complete_key._project + database = complete_key._database flat_path = list(complete_key._flat_path[:-1]) start_id = complete_key._flat_path[-1] key_pbs = [] for id in range(start_id, start_id + num_ids): path = flat_path + [id] - key = key_class(*path, project=project, namespace=namespace) + key = key_class( + *path, project=project, database=database, namespace=namespace + ) key_pbs.append(key.to_protobuf()) kwargs = _make_retry_timeout_kwargs(retry, timeout) + request = { + "project_id": complete_key.project, + "keys": key_pbs, + } + helpers.set_database_id_to_request(request, self.database) self._datastore_api.reserve_ids( - request={"project_id": complete_key.project, "keys": key_pbs}, **kwargs + request=request, + **kwargs, ) return None @@ -1020,8 +1055,15 @@ def reserve_ids_multi(self, complete_keys, retry=None, timeout=None): kwargs = _make_retry_timeout_kwargs(retry, timeout) key_pbs = [key.to_protobuf() for key in complete_keys] + request = { + "project_id": complete_keys[0].project, + "keys": key_pbs, + } + helpers.set_database_id_to_request(request, complete_keys[0].database) + self._datastore_api.reserve_ids( - request={"project_id": complete_keys[0].project, "keys": key_pbs}, **kwargs + request=request, + **kwargs, ) return None diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 123f356efbcf..2deecabedd35 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -300,11 +300,15 @@ def key_from_protobuf(pb): project = None if pb.partition_id.project_id: # Simple field (string) project = pb.partition_id.project_id + database = None + + if pb.partition_id.database_id: # Simple field (string) + database = pb.partition_id.database_id namespace = None if pb.partition_id.namespace_id: # Simple field (string) namespace = pb.partition_id.namespace_id - return Key(*path_args, namespace=namespace, project=project) + return Key(*path_args, namespace=namespace, project=project, database=database) def _pb_attr_value(val): @@ -486,6 +490,14 @@ def _set_protobuf_value(value_pb, val): setattr(value_pb, attr, val) +def set_database_id_to_request(request, database_id=None): + """ + Set the "database_id" field to the request only if it was provided. + """ + if database_id is not None: + request["database_id"] = database_id + + class GeoPoint(object): """Simple container for a geo point value. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/key.py b/packages/google-cloud-datastore/google/cloud/datastore/key.py index 1a8e3645fc8e..4384131c58fc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/key.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/key.py @@ -87,6 +87,13 @@ class Key(object): >>> client.key('Parent', 'foo', 'Child') + To create a key from a non-default database: + + .. doctest:: key-ctor + + >>> Key('EntityKind', 1234, project=project, database='mydb') + + :type path_args: tuple of string and integer :param path_args: May represent a partial (odd length) or full (even length) key path. @@ -97,6 +104,7 @@ class Key(object): * namespace (string): A namespace identifier for the key. * project (string): The project associated with the key. + * database (string): The database associated with the key. * parent (:class:`~google.cloud.datastore.key.Key`): The parent of the key. The project argument is required unless it has been set implicitly. @@ -106,10 +114,12 @@ def __init__(self, *path_args, **kwargs): self._flat_path = path_args parent = self._parent = kwargs.get("parent") self._namespace = kwargs.get("namespace") + self._database = kwargs.get("database") + project = kwargs.get("project") self._project = _validate_project(project, parent) - # _flat_path, _parent, _namespace and _project must be set before - # _combine_args() is called. + # _flat_path, _parent, _database, _namespace, and _project must be set + # before _combine_args() is called. self._path = self._combine_args() def __eq__(self, other): @@ -118,7 +128,9 @@ def __eq__(self, other): Incomplete keys never compare equal to any other key. Completed keys compare equal if they have the same path, project, - and namespace. + database, and namespace. + + (Note that database=None is considered to refer to the default database.) :rtype: bool :returns: True if the keys compare equal, else False. @@ -133,6 +145,7 @@ def __eq__(self, other): self.flat_path == other.flat_path and self.project == other.project and self.namespace == other.namespace + and self.database == other.database ) def __ne__(self, other): @@ -141,7 +154,9 @@ def __ne__(self, other): Incomplete keys never compare equal to any other key. Completed keys compare equal if they have the same path, project, - and namespace. + database, and namespace. + + (Note that database=None is considered to refer to the default database.) 
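Because equality and hashing now fold in the database (per the note just above, ``database=None`` means the default database), two otherwise identical keys in different databases stay distinct, including as dictionary keys. A pure, runnable check with placeholder ids:

    from google.cloud.datastore.key import Key

    default_db_key = Key("Kind", 1234, project="my-project")
    named_db_key = Key("Kind", 1234, project="my-project", database="my-db")

    assert default_db_key != named_db_key
    # Both coexist in one set/dict because __hash__ incorporates the
    # database whenever it is set.
    assert len({default_db_key, named_db_key}) == 2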
:rtype: bool :returns: False if the keys compare equal, else True. @@ -149,12 +164,15 @@ def __ne__(self, other): return not self == other def __hash__(self): - """Hash a keys for use in a dictionary lookp. + """Hash this key for use in a dictionary lookup. :rtype: int :returns: a hash of the key's state. """ - return hash(self.flat_path) + hash(self.project) + hash(self.namespace) + hash_val = hash(self.flat_path) + hash(self.project) + hash(self.namespace) + if self.database: + hash_val = hash_val + hash(self.database) + return hash_val @staticmethod def _parse_path(path_args): @@ -204,7 +222,7 @@ def _combine_args(self): """Sets protected data by combining raw data set from the constructor. If a ``_parent`` is set, updates the ``_flat_path`` and sets the - ``_namespace`` and ``_project`` if not already set. + ``_namespace``, ``_database``, and ``_project`` if not already set. :rtype: :class:`list` of :class:`dict` :returns: A list of key parts with kind and ID or name set. @@ -227,6 +245,9 @@ def _combine_args(self): self._namespace = self._parent.namespace if self._project is not None and self._project != self._parent.project: raise ValueError("Child project must agree with parent's.") + if self._database is not None and self._database != self._parent.database: + raise ValueError("Child database must agree with parent's.") + self._database = self._parent.database self._project = self._parent.project return child_path @@ -241,7 +262,10 @@ def _clone(self): :returns: A new ``Key`` instance with the same data as the current one. """ cloned_self = self.__class__( - *self.flat_path, project=self.project, namespace=self.namespace + *self.flat_path, + project=self.project, + database=self.database, + namespace=self.namespace ) # If the current parent has already been set, we re-use # the same instance @@ -283,6 +307,8 @@ def to_protobuf(self): """ key = _entity_pb2.Key() key.partition_id.project_id = self.project + if self.database: + key.partition_id.database_id = self.database if self.namespace: key.partition_id.namespace_id = self.namespace @@ -314,6 +340,9 @@ def to_legacy_urlsafe(self, location_prefix=None): prefix may need to be specified to obtain identical urlsafe keys. + .. note:: + to_legacy_urlsafe only supports the default database + :type location_prefix: str :param location_prefix: The location prefix of an App Engine project ID. Often this value is 's~', but may also be @@ -323,6 +352,9 @@ def to_legacy_urlsafe(self, location_prefix=None): :rtype: bytes :returns: A bytestring containing the key encoded as URL-safe base64. """ + if self.database: + raise ValueError("to_legacy_urlsafe only supports the default database") + if location_prefix is None: project_id = self.project else: @@ -345,6 +377,9 @@ def from_legacy_urlsafe(cls, urlsafe): "Reference"). This assumes that ``urlsafe`` was created within an App Engine app via something like ``ndb.Key(...).urlsafe()``. + .. note:: + from_legacy_urlsafe only supports the default database. + :type urlsafe: bytes or unicode :param urlsafe: The base64 encoded (ASCII) string corresponding to a datastore "Key" / "Reference". @@ -376,6 +411,15 @@ def is_partial(self): """ return self.id_or_name is None + @property + def database(self): + """Database getter. + + :rtype: str + :returns: The database of the current key. + """ + return self._database + @property def namespace(self): """Namespace getter. @@ -457,7 +501,7 @@ def _make_parent(self): """Creates a parent key for the current path. 
Extracts all but the last element in the key path and creates a new - key, while still matching the namespace and the project. + key, while still matching the namespace, the database, and the project. :rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType` :returns: A new ``Key`` instance, whose path consists of all but the @@ -470,7 +514,10 @@ def _make_parent(self): parent_args = self.flat_path[:-2] if parent_args: return self.__class__( - *parent_args, project=self.project, namespace=self.namespace + *parent_args, + project=self.project, + database=self.database, + namespace=self.namespace ) @property @@ -488,7 +535,15 @@ def parent(self): return self._parent def __repr__(self): - return "<Key%s, project=%s>" % (self._flat_path, self.project) + """String representation of this key. + + Includes the project and database, but suppresses them if they are + equal to the default values. + """ + repr = "<Key%s, project=%s" % (self._flat_path, self.project) + if self.database: + repr += ", database=%s" % self.database + return repr + ">" def _validate_project(project, parent): @@ -549,12 +604,14 @@ def _get_empty(value, empty_value): def _check_database_id(database_id): """Make sure a "Reference" database ID is empty. + Here, "empty" means either ``None`` or ``""``. + :type database_id: unicode :param database_id: The ``database_id`` field from a "Reference" protobuf. :raises: :exc:`ValueError` if the ``database_id`` is not empty. """ - if database_id != "": + if database_id is not None and database_id != "": msg = _DATABASE_ID_TEMPLATE.format(database_id) raise ValueError(msg) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 2659ebc0fb18..289605bb7821 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -789,7 +789,9 @@ def _next_page(self): ) partition_id = entity_pb2.PartitionId( - project_id=self._query.project, namespace_id=self._query.namespace + project_id=self._query.project, + database_id=self.client.database, + namespace_id=self._query.namespace, ) kwargs = {} @@ -800,13 +802,17 @@ def _next_page(self): if self._timeout is not None: kwargs["timeout"] = self._timeout + request = { + "project_id": self._query.project, + "partition_id": partition_id, + "read_options": read_options, + "query": query_pb, + } + + helpers.set_database_id_to_request(request, self.client.database) + response_pb = self.client._datastore_api.run_query( - request={ - "project_id": self._query.project, - "partition_id": partition_id, - "read_options": read_options, - "query": query_pb, - }, + request=request, **kwargs, ) @@ -824,13 +830,16 @@ def _next_page(self): query_pb.start_cursor = response_pb.batch.skipped_cursor query_pb.offset -= response_pb.batch.skipped_results + request = { + "project_id": self._query.project, + "partition_id": partition_id, + "read_options": read_options, + "query": query_pb, + } + helpers.set_database_id_to_request(request, self.client.database) + response_pb = self.client._datastore_api.run_query( - request={ - "project_id": self._query.project, - "partition_id": partition_id, - "read_options": read_options, - "query": query_pb, - }, + request=request, **kwargs, ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index dc18e64daecc..3e71ae269754 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -18,6 +18,8 @@
from google.cloud.datastore_v1.types import TransactionOptions from google.protobuf import timestamp_pb2 +from google.cloud.datastore.helpers import set_database_id_to_request + def _make_retry_timeout_kwargs(retry, timeout): """Helper: make optional retry / timeout kwargs dict.""" @@ -227,6 +229,8 @@ def begin(self, retry=None, timeout=None): "project_id": self.project, "transaction_options": self._options, } + set_database_id_to_request(request, self._client.database) + try: response_pb = self._client._datastore_api.begin_transaction( request=request, **kwargs ) @@ -258,9 +262,13 @@ def rollback(self, retry=None, timeout=None): try: # No need to use the response it contains nothing. - self._client._datastore_api.rollback( - request={"project_id": self.project, "transaction": self._id}, **kwargs - ) + request = { + "project_id": self.project, + "transaction": self._id, + } + + set_database_id_to_request(request, self._client.database) + self._client._datastore_api.rollback(request=request, **kwargs) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 9fd055b73ec3..ed66e490b1d1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -38,11 +38,11 @@ class PartitionId(proto.Message): r"""A partition ID identifies a grouping of entities. The grouping is - always by project and namespace, however the namespace ID may be - empty. + always by project, database, and namespace, however the namespace ID may be + empty. Empty database ID refers to the default database. - A partition ID contains several dimensions: project ID and namespace - ID. + A partition ID contains several dimensions: project ID, database ID, + and namespace ID. Partition dimensions: @@ -54,7 +54,7 @@ ID is forbidden in certain documented contexts. Foreign partition IDs (in which the project ID does not match the - context project ID ) are discouraged. Reads and writes of foreign + context project ID) are discouraged. Reads and writes of foreign partition IDs may fail if the project is not in an active state. Attributes: @@ -63,7 +63,7 @@ belong. database_id (str): If not empty, the ID of the database to which - the entities belong. + the entities belong. Empty corresponds to the default database. namespace_id (str): If not empty, the ID of the namespace to which the entities belong.
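At the wire level, all of the above converges on ``PartitionId``, which now carries three dimensions. An illustrative construction (ids are placeholders; an empty ``database_id`` still addresses the default database):

    from google.cloud.datastore_v1.types import entity as entity_pb2

    partition_id = entity_pb2.PartitionId(
        project_id="my-project",
        database_id="my-db",  # leave empty for the default database
        namespace_id="my-namespace",
    )
    assert partition_id.database_id == "my-db"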
diff --git a/packages/google-cloud-datastore/tests/system/_helpers.py b/packages/google-cloud-datastore/tests/system/_helpers.py index e8b5cf1cb391..137356256b04 100644 --- a/packages/google-cloud-datastore/tests/system/_helpers.py +++ b/packages/google-cloud-datastore/tests/system/_helpers.py @@ -18,6 +18,8 @@ from google.cloud.datastore.client import DATASTORE_DATASET from test_utils.system import unique_resource_id +_DATASTORE_DATABASE = "SYSTEM_TESTS_DATABASE" +TEST_DATABASE = os.getenv(_DATASTORE_DATABASE, "system-tests-named-db") EMULATOR_DATASET = os.getenv(DATASTORE_DATASET) @@ -28,16 +30,20 @@ def unique_id(prefix, separator="-"): _SENTINEL = object() -def clone_client(base_client, namespace=_SENTINEL): +def clone_client(base_client, namespace=_SENTINEL, database=_SENTINEL): if namespace is _SENTINEL: namespace = base_client.namespace + if database is _SENTINEL: + database = base_client.database + kwargs = {} if EMULATOR_DATASET is None: kwargs["credentials"] = base_client._credentials return datastore.Client( project=base_client.project, + database=database, namespace=namespace, _http=base_client._http, **kwargs, diff --git a/packages/google-cloud-datastore/tests/system/conftest.py b/packages/google-cloud-datastore/tests/system/conftest.py index b0547f83396a..1840556b5af2 100644 --- a/packages/google-cloud-datastore/tests/system/conftest.py +++ b/packages/google-cloud-datastore/tests/system/conftest.py @@ -24,22 +24,33 @@ def in_emulator(): return _helpers.EMULATOR_DATASET is not None +@pytest.fixture(scope="session") +def database_id(request): + return request.param + + @pytest.fixture(scope="session") def test_namespace(): return _helpers.unique_id("ns") @pytest.fixture(scope="session") -def datastore_client(test_namespace): +def datastore_client(test_namespace, database_id): + if _helpers.TEST_DATABASE is not None: + database_id = _helpers.TEST_DATABASE if _helpers.EMULATOR_DATASET is not None: http = requests.Session() # Un-authorized. 
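The session-scoped ``database_id`` fixture above returns ``request.param``, the hook pytest uses for ``indirect=True`` parametrization, so each system test below runs once per database. A standalone illustration of the pattern (test and database names are made up):

    import pytest

    @pytest.fixture(scope="session")
    def database_id(request):
        return request.param  # injected by indirect parametrization

    @pytest.mark.parametrize(
        "database_id", [None, "some-named-db"], indirect=True
    )
    def test_runs_once_per_database(database_id):
        # One run against the default database (None), one against the
        # named database.
        assert database_id is None or database_id == "some-named-db"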
- return datastore.Client( + client = datastore.Client( project=_helpers.EMULATOR_DATASET, + database=database_id, namespace=test_namespace, _http=http, ) else: - return datastore.Client(namespace=test_namespace) + client = datastore.Client(database=database_id, namespace=test_namespace) + + assert client.database == database_id + return client @pytest.fixture(scope="function") diff --git a/packages/google-cloud-datastore/tests/system/index.yaml b/packages/google-cloud-datastore/tests/system/index.yaml index 08a50d094de4..f9cc2a5bc677 100644 --- a/packages/google-cloud-datastore/tests/system/index.yaml +++ b/packages/google-cloud-datastore/tests/system/index.yaml @@ -30,4 +30,18 @@ indexes: - kind: Character properties: - name: Character - - name: appearances \ No newline at end of file + - name: appearances + +- kind: Character + ancestor: yes + properties: + - name: alive + - name: family + - name: appearances + + +- kind: Character + ancestor: yes + properties: + - name: family + - name: appearances diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py index b912e96b763e..51045003b632 100644 --- a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py +++ b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py @@ -40,8 +40,8 @@ def _do_fetch(aggregation_query, **kw): @pytest.fixture(scope="session") -def aggregation_query_client(datastore_client): - return _helpers.clone_client(datastore_client, namespace=None) +def aggregation_query_client(datastore_client, database_id=None): + return _helpers.clone_client(datastore_client, namespace=None, database=database_id) @pytest.fixture(scope="session") @@ -69,7 +69,8 @@ def nested_query(aggregation_query_client, ancestor_key): return _make_query(aggregation_query_client, ancestor_key) -def test_aggregation_query_default(aggregation_query_client, nested_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_default(aggregation_query_client, nested_query, database_id): query = nested_query aggregation_query = aggregation_query_client.aggregation_query(query) @@ -81,7 +82,10 @@ def test_aggregation_query_default(aggregation_query_client, nested_query): assert r.value == 8 -def test_aggregation_query_with_alias(aggregation_query_client, nested_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_with_alias( + aggregation_query_client, nested_query, database_id +): query = nested_query aggregation_query = aggregation_query_client.aggregation_query(query) @@ -93,7 +97,10 @@ def test_aggregation_query_with_alias(aggregation_query_client, nested_query): assert r.value > 0 -def test_aggregation_query_with_limit(aggregation_query_client, nested_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_with_limit( + aggregation_query_client, nested_query, database_id +): query = nested_query aggregation_query = aggregation_query_client.aggregation_query(query) @@ -113,8 +120,9 @@ def test_aggregation_query_with_limit(aggregation_query_client, nested_query): assert r.value == 2 +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_aggregation_query_multiple_aggregations( - aggregation_query_client, nested_query + aggregation_query_client, nested_query, database_id ): query = 
nested_query @@ -128,7 +136,10 @@ def test_aggregation_query_multiple_aggregations( assert r.value > 0 -def test_aggregation_query_add_aggregation(aggregation_query_client, nested_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_add_aggregation( + aggregation_query_client, nested_query, database_id +): from google.cloud.datastore.aggregation import CountAggregation query = nested_query @@ -143,7 +154,10 @@ def test_aggregation_query_add_aggregation(aggregation_query_client, nested_quer assert r.value > 0 -def test_aggregation_query_add_aggregations(aggregation_query_client, nested_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_add_aggregations( + aggregation_query_client, nested_query, database_id +): from google.cloud.datastore.aggregation import CountAggregation query = nested_query @@ -159,8 +173,9 @@ def test_aggregation_query_add_aggregations(aggregation_query_client, nested_que assert r.value > 0 +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_aggregation_query_add_aggregations_duplicated_alias( - aggregation_query_client, nested_query + aggregation_query_client, nested_query, database_id ): from google.cloud.datastore.aggregation import CountAggregation from google.api_core.exceptions import BadRequest @@ -187,8 +202,9 @@ def test_aggregation_query_add_aggregations_duplicated_alias( _do_fetch(aggregation_query) +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_aggregation_query_with_nested_query_filtered( - aggregation_query_client, nested_query + aggregation_query_client, nested_query, database_id ): query = nested_query @@ -210,8 +226,9 @@ def test_aggregation_query_with_nested_query_filtered( assert r.value == 6 +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_aggregation_query_with_nested_query_multiple_filters( - aggregation_query_client, nested_query + aggregation_query_client, nested_query, database_id ): query = nested_query diff --git a/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py b/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py index f934d067a769..2d7c37004eb8 100644 --- a/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py +++ b/packages/google-cloud-datastore/tests/system/test_allocate_reserve_ids.py @@ -12,10 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest import warnings +from . 
import _helpers -def test_client_allocate_ids(datastore_client): + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_allocate_ids(datastore_client, database_id): num_ids = 10 allocated_keys = datastore_client.allocate_ids( datastore_client.key("Kind"), @@ -32,7 +36,8 @@ def test_client_allocate_ids(datastore_client): assert len(unique_ids) == num_ids -def test_client_reserve_ids_sequential(datastore_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_reserve_ids_sequential(datastore_client, database_id): num_ids = 10 key = datastore_client.key("Kind", 1234) @@ -41,7 +46,8 @@ def test_client_reserve_ids_sequential(datastore_client): datastore_client.reserve_ids_sequential(key, num_ids) -def test_client_reserve_ids_deprecated(datastore_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_reserve_ids_deprecated(datastore_client, database_id): num_ids = 10 key = datastore_client.key("Kind", 1234) @@ -53,7 +59,8 @@ def test_client_reserve_ids_deprecated(datastore_client): assert "reserve_ids_sequential" in str(warned[0].message) -def test_client_reserve_ids_multi(datastore_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_reserve_ids_multi(datastore_client, database_id): key1 = datastore_client.key("Kind", 1234) key2 = datastore_client.key("Kind", 1235) diff --git a/packages/google-cloud-datastore/tests/system/test_put.py b/packages/google-cloud-datastore/tests/system/test_put.py index 2f8de3a06295..4cb5f6e831be 100644 --- a/packages/google-cloud-datastore/tests/system/test_put.py +++ b/packages/google-cloud-datastore/tests/system/test_put.py @@ -54,7 +54,8 @@ def _get_post(datastore_client, id_or_name=None, post_content=None): @pytest.mark.parametrize( "name,key_id", [(None, None), ("post1", None), (None, 123456789)] ) -def test_client_put(datastore_client, entities_to_delete, name, key_id): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_put(datastore_client, entities_to_delete, name, key_id, database_id): entity = _get_post(datastore_client, id_or_name=(name or key_id)) datastore_client.put(entity) entities_to_delete.append(entity) @@ -65,11 +66,14 @@ def test_client_put(datastore_client, entities_to_delete, name, key_id): assert entity.key.id == key_id retrieved_entity = datastore_client.get(entity.key) - # Check the given and retrieved are the the same. + # Check the given and retrieved are the same. 
assert retrieved_entity == entity -def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_put_w_multiple_in_txn( + datastore_client, entities_to_delete, database_id +): with datastore_client.transaction() as xact: entity1 = _get_post(datastore_client) xact.put(entity1) @@ -98,14 +102,18 @@ def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete): assert len(matches) == 2 -def test_client_query_w_empty_kind(datastore_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_query_w_empty_kind(datastore_client, database_id): query = datastore_client.query(kind="Post") query.ancestor = parent_key(datastore_client) posts = query.fetch(limit=2) assert list(posts) == [] -def test_client_put_w_all_value_types(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_put_w_all_value_types( + datastore_client, entities_to_delete, database_id +): key = datastore_client.key("TestPanObject", 1234) entity = datastore.Entity(key=key) entity["timestamp"] = datetime.datetime(2014, 9, 9, tzinfo=UTC) @@ -127,12 +135,15 @@ def test_client_put_w_all_value_types(datastore_client, entities_to_delete): datastore_client.put(entity) entities_to_delete.append(entity) - # Check the original and retrieved are the the same. + # Check the original and retrieved are the same. retrieved_entity = datastore_client.get(entity.key) assert retrieved_entity == entity -def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_put_w_entity_w_self_reference( + datastore_client, entities_to_delete, database_id +): parent_key = datastore_client.key("Residence", "NewYork") key = datastore_client.key("Person", "name", parent=parent_key) entity = datastore.Entity(key=key) @@ -151,11 +162,12 @@ def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_dele assert stored_persons == [entity] -def test_client_put_w_empty_array(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_client_put_w_empty_array(datastore_client, entities_to_delete, database_id): local_client = _helpers.clone_client(datastore_client) key = local_client.key("EmptyArray", 1234) - local_client = datastore.Client() + local_client = datastore.Client(database=local_client.database) entity = datastore.Entity(key=key) entity["children"] = [] local_client.put(entity) diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index 6b26629fdddb..864bab570678 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -71,7 +71,8 @@ def ancestor_query(query_client, ancestor_key): return _make_ancestor_query(query_client, ancestor_key) -def test_query_w_ancestor(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_ancestor(ancestor_query, database_id): query = ancestor_query expected_matches = 8 @@ -81,7 +82,8 @@ def test_query_w_ancestor(ancestor_query): assert len(entities) == expected_matches -def test_query_w_limit_paging(ancestor_query): 
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_limit_paging(ancestor_query, database_id): query = ancestor_query limit = 5 @@ -101,7 +103,8 @@ def test_query_w_limit_paging(ancestor_query): assert len(new_character_entities) == characters_remaining -def test_query_w_simple_filter(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_simple_filter(ancestor_query, database_id): query = ancestor_query query.add_filter(filter=PropertyFilter("appearances", ">=", 20)) expected_matches = 6 @@ -112,7 +115,8 @@ def test_query_w_simple_filter(ancestor_query): assert len(entities) == expected_matches -def test_query_w_multiple_filters(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_multiple_filters(ancestor_query, database_id): query = ancestor_query query.add_filter(filter=PropertyFilter("appearances", ">=", 26)) query = query.add_filter(filter=PropertyFilter("family", "=", "Stark")) @@ -124,7 +128,8 @@ def test_query_w_multiple_filters(ancestor_query): assert len(entities) == expected_matches -def test_query_key_filter(query_client, ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_key_filter(query_client, ancestor_query, database_id): # Use the client for this test instead of the global. query = ancestor_query rickard_key = query_client.key(*populate_datastore.RICKARD) @@ -137,7 +142,8 @@ def test_query_key_filter(query_client, ancestor_query): assert len(entities) == expected_matches -def test_query_w_order(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_order(ancestor_query, database_id): query = ancestor_query query.order = "appearances" expected_matches = 8 @@ -152,7 +158,8 @@ def test_query_w_order(ancestor_query): assert entities[7]["name"] == populate_datastore.CHARACTERS[3]["name"] -def test_query_w_projection(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_projection(ancestor_query, database_id): filtered_query = ancestor_query filtered_query.projection = ["name", "family"] filtered_query.order = ["name", "family"] @@ -181,7 +188,8 @@ def test_query_w_projection(ancestor_query): assert dict(sansa_entity) == {"name": "Sansa", "family": "Stark"} -def test_query_w_paginate_simple_uuid_keys(query_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_paginate_simple_uuid_keys(query_client, database_id): # See issue #4264 page_query = query_client.query(kind="uuid_key") @@ -199,7 +207,8 @@ def test_query_w_paginate_simple_uuid_keys(query_client): assert page_count > 1 -def test_query_paginate_simple_timestamp_keys(query_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_paginate_simple_timestamp_keys(query_client, database_id): # See issue #4264 page_query = query_client.query(kind="timestamp_key") @@ -217,7 +226,8 @@ def test_query_paginate_simple_timestamp_keys(query_client): assert page_count > 1 -def test_query_w_offset_w_timestamp_keys(query_client): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_offset_w_timestamp_keys(query_client, database_id): # See issue #4675 max_all = 10000 offset = 1 @@ 
-231,7 +241,8 @@ def test_query_w_offset_w_timestamp_keys(query_client): assert offset_w_limit == all_w_limit[offset:] -def test_query_paginate_with_offset(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_paginate_with_offset(ancestor_query, database_id): page_query = ancestor_query page_query.order = "appearances" offset = 2 @@ -259,7 +270,8 @@ def test_query_paginate_with_offset(ancestor_query): assert entities[2]["name"] == "Arya" -def test_query_paginate_with_start_cursor(query_client, ancestor_key): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_paginate_with_start_cursor(query_client, ancestor_key, database_id): # Don't use fixture, because we need to create a clean copy later. page_query = _make_ancestor_query(query_client, ancestor_key) page_query.order = "appearances" @@ -287,7 +299,8 @@ def test_query_paginate_with_start_cursor(query_client, ancestor_key): assert new_entities[2]["name"] == "Arya" -def test_query_distinct_on(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_distinct_on(ancestor_query, database_id): query = ancestor_query query.distinct_on = ["alive"] expected_matches = 2 @@ -348,7 +361,8 @@ def large_query(large_query_client): (200, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS + 1000, 0), ], ) -def test_large_query(large_query, limit, offset, expected): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_large_query(large_query, limit, offset, expected, database_id): page_query = large_query page_query.add_filter(filter=PropertyFilter("family", "=", "Stark")) page_query.add_filter(filter=PropertyFilter("alive", "=", False)) @@ -359,7 +373,8 @@ def test_large_query(large_query, limit, offset, expected): assert len(entities) == expected -def test_query_add_property_filter(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_add_property_filter(ancestor_query, database_id): query = ancestor_query query.add_filter(filter=PropertyFilter("appearances", ">=", 26)) @@ -372,7 +387,8 @@ def test_query_add_property_filter(ancestor_query): assert e["appearances"] >= 26 -def test_query_and_composite_filter(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_and_composite_filter(ancestor_query, database_id): query = ancestor_query query.add_filter( @@ -392,7 +408,8 @@ def test_query_and_composite_filter(ancestor_query): assert entities[0]["name"] == "Jon Snow" -def test_query_or_composite_filter(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_or_composite_filter(ancestor_query, database_id): query = ancestor_query # name = Arya or name = Jon Snow @@ -414,7 +431,8 @@ def test_query_or_composite_filter(ancestor_query): assert entities[1]["name"] == "Jon Snow" -def test_query_add_filters(ancestor_query): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_add_filters(ancestor_query, database_id): query = ancestor_query # family = Stark AND name = Jon Snow @@ -430,7 +448,8 @@ def test_query_add_filters(ancestor_query): assert entities[0]["name"] == "Jon Snow" -def test_query_add_complex_filters(ancestor_query): +@pytest.mark.parametrize("database_id", [None, 
_helpers.TEST_DATABASE], indirect=True) +def test_query_add_complex_filters(ancestor_query, database_id): query = ancestor_query # (alive = True OR appearances >= 26) AND (family = Stark) diff --git a/packages/google-cloud-datastore/tests/system/test_read_consistency.py b/packages/google-cloud-datastore/tests/system/test_read_consistency.py index 9435c5f7d638..33004352633c 100644 --- a/packages/google-cloud-datastore/tests/system/test_read_consistency.py +++ b/packages/google-cloud-datastore/tests/system/test_read_consistency.py @@ -11,12 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import pytest import time from datetime import datetime, timezone from google.cloud import datastore +from . import _helpers def _parent_key(datastore_client): @@ -33,9 +34,9 @@ def _put_entity(datastore_client, entity_id): return entity -def test_get_w_read_time(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_get_w_read_time(datastore_client, entities_to_delete, database_id): entity = _put_entity(datastore_client, 1) - entities_to_delete.append(entity) # Add some sleep to accommodate server & client clock discrepancy. @@ -62,7 +63,8 @@ def test_get_w_read_time(datastore_client, entities_to_delete): assert retrieved_entity_from_xact["field"] == "old_value" -def test_query_w_read_time(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_w_read_time(datastore_client, entities_to_delete, database_id): entity0 = _put_entity(datastore_client, 1) entity1 = _put_entity(datastore_client, 2) entity2 = _put_entity(datastore_client, 3) diff --git a/packages/google-cloud-datastore/tests/system/test_transaction.py b/packages/google-cloud-datastore/tests/system/test_transaction.py index b380561f45c7..a93538fbeb43 100644 --- a/packages/google-cloud-datastore/tests/system/test_transaction.py +++ b/packages/google-cloud-datastore/tests/system/test_transaction.py @@ -20,7 +20,10 @@ from . 
import _helpers -def test_transaction_via_with_statement(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_transaction_via_with_statement( + datastore_client, entities_to_delete, database_id +): key = datastore_client.key("Company", "Google") entity = datastore.Entity(key=key) entity["url"] = "www.google.com" @@ -38,9 +41,9 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): assert retrieved_entity == entity +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_transaction_via_explicit_begin_get_commit( - datastore_client, - entities_to_delete, + datastore_client, entities_to_delete, database_id ): # See # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 @@ -80,7 +83,8 @@ def test_transaction_via_explicit_begin_get_commit( assert after2["balance"] == before_2 + transfer_amount -def test_failure_with_contention(datastore_client, entities_to_delete): +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_failure_with_contention(datastore_client, entities_to_delete, database_id): contention_prop_name = "baz" local_client = _helpers.clone_client(datastore_client) diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index fa976f60907f..cd552c26a459 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -36,6 +36,10 @@ MAX_DEL_ENTITIES = 500 +def get_system_test_db(): + return os.getenv("SYSTEM_TESTS_DATABASE") or "system-tests-named-db" + + def print_func(message): if os.getenv("GOOGLE_CLOUD_NO_PRINT") != "true": print(message) @@ -85,14 +89,18 @@ def remove_all_entities(client): client.delete_multi(keys) -def main(): - client = datastore.Client() +def run(database): + client = datastore.Client(database=database) kinds = sys.argv[1:] if len(kinds) == 0: kinds = ALL_KINDS - print_func("This command will remove all entities for " "the following kinds:") + print_func( + "This command will remove all entities from the database " + + database + + " for the following kinds:" + ) print_func("\n".join("- " + val for val in kinds)) response = input("Is this OK [y/n]? ") @@ -105,5 +113,10 @@ def main(): print_func("Doing nothing.") +def main(): + for database in ["", get_system_test_db()]: + run(database) + + if __name__ == "__main__": main() diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 473950708e49..9077241f76f5 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -59,6 +59,10 @@ LARGE_CHARACTER_KIND = "LargeCharacter" +def get_system_test_db(): + return os.getenv("SYSTEM_TESTS_DATABASE") or "system-tests-named-db" + + def print_func(message): if os.getenv("GOOGLE_CLOUD_NO_PRINT") != "true": print(message) @@ -119,7 +123,7 @@ def put_objects(count): def add_characters(client=None): if client is None: # Get a client that uses the test dataset. 
- client = datastore.Client() + client = datastore.Client(database="mw-other-db") with client.transaction() as xact: for key_path, character in zip(KEY_PATHS, CHARACTERS): if key_path[-1] != character["name"]: @@ -135,7 +139,7 @@ def add_uid_keys(client=None): if client is None: # Get a client that uses the test dataset. - client = datastore.Client() + client = datastore.Client(database="mw-other-db") num_batches = 2 batch_size = 500 @@ -175,8 +179,8 @@ def add_timestamp_keys(client=None): batch.put(entity) -def main(): - client = datastore.Client() +def run(database): + client = datastore.Client(database=database) flags = sys.argv[1:] if len(flags) == 0: @@ -192,5 +196,10 @@ add_timestamp_keys(client) +def main(): + for database in ["", get_system_test_db()]: + run(database) + + if __name__ == "__main__": main() diff --git a/packages/google-cloud-datastore/tests/unit/test__http.py b/packages/google-cloud-datastore/tests/unit/test__http.py index f9e0a29f7bc9..48e7f5b6873a 100644 --- a/packages/google-cloud-datastore/tests/unit/test__http.py +++ b/packages/google-cloud-datastore/tests/unit/test__http.py @@ -18,6 +18,8 @@ import pytest import requests +from google.cloud.datastore.helpers import set_database_id_to_request + def test__make_retry_timeout_kwargs_w_empty(): from google.cloud.datastore._http import _make_retry_timeout_kwargs @@ -97,9 +99,9 @@ def test__make_request_pb_w_instance(): assert foo is passed -def _request_helper(retry=None, timeout=None): +def _request_helper(retry=None, timeout=None, database=None): from google.cloud import _http as connection_module - from google.cloud.datastore._http import _request + from google.cloud.datastore._http import _request, _update_headers project = "PROJECT" method = "METHOD" @@ -113,7 +115,9 @@ def _request_helper(retry=None, timeout=None): kwargs = _retry_timeout_kw(retry, timeout, http) - response = _request(http, project, method, data, base_url, client_info, **kwargs) + response = _request( + http, project, method, data, base_url, client_info, database=database, **kwargs + ) assert response == response_data # Check that the mocks were called as expected.
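These unit-test updates all assert the same plumbing: the request dict gains a ``database_id`` via the helper and round-trips into the request protobuf that ``_verify_protobuf_call`` inspects. Reduced to a standalone check (using the same placeholder constants as the tests here):

    from google.cloud.datastore.helpers import set_database_id_to_request
    from google.cloud.datastore_v1.types import datastore as datastore_pb2

    request = {"project_id": "PROJECT", "transaction": b"xact"}
    set_database_id_to_request(request, "somedb")

    # proto-plus messages accept a mapping, which is how _make_request_pb
    # builds the request protobuf from the dict form.
    request_pb = datastore_pb2.RollbackRequest(request)
    assert request_pb.project_id == "PROJECT"
    assert request_pb.database_id == "somedb"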
@@ -122,8 +126,9 @@ def _request_helper(retry=None, timeout=None): "Content-Type": "application/x-protobuf", "User-Agent": user_agent, connection_module.CLIENT_INFO_HEADER: user_agent, + "x-goog-request-params": f"project_id={project}", } - + _update_headers(expected_headers, project, database_id=database) if retry is not None: retry.assert_called_once_with(http.request) @@ -133,18 +138,21 @@ def _request_helper(retry=None, timeout=None): ) -def test__request_defaults(): - _request_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test__request_defaults(database_id): + _request_helper(database=database_id) -def test__request_w_retry(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test__request_w_retry(database_id): retry = mock.MagicMock() - _request_helper(retry=retry) + _request_helper(retry=retry, database=database_id) -def test__request_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test__request_w_timeout(database_id): timeout = 5.0 - _request_helper(timeout=timeout) + _request_helper(timeout=timeout, database=database_id) def test__request_failure(): @@ -169,13 +177,13 @@ def test__request_failure(): ) with pytest.raises(BadRequest) as exc: - _request(session, project, method, data, uri, client_info) + _request(session, project, method, data, uri, client_info, None) expected_message = "400 Entity value is indexed." assert exc.match(expected_message) -def _rpc_helper(retry=None, timeout=None): +def _rpc_helper(retry=None, timeout=None, database=None): from google.cloud.datastore._http import _rpc from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -203,7 +211,8 @@ def _rpc_helper(retry=None, timeout=None): client_info, request_pb, datastore_pb2.BeginTransactionResponse, - **kwargs + database, + **kwargs, ) assert result == response_pb._pb @@ -215,22 +224,26 @@ def _rpc_helper(retry=None, timeout=None): request_pb._pb.SerializeToString(), base_url, client_info, - **kwargs + database, + **kwargs, ) -def test__rpc_defaults(): - _rpc_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test__rpc_defaults(database_id): + _rpc_helper(database=database_id) -def test__rpc_w_retry(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test__rpc_w_retry(database_id): retry = mock.MagicMock() - _rpc_helper(retry=retry) + _rpc_helper(retry=retry, database=database_id) -def test__rpc_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test__rpc_w_timeout(database_id): timeout = 5.0 - _rpc_helper(timeout=timeout) + _rpc_helper(timeout=timeout, database=database_id) def test_api_ctor(): @@ -245,6 +258,7 @@ def _lookup_single_helper( empty=True, retry=None, timeout=None, + database=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -283,6 +297,7 @@ def _lookup_single_helper( "keys": [key_pb], "read_options": read_options, } + set_database_id_to_request(request, database) kwargs = _retry_timeout_kw(retry, timeout, http) response = ds_api.lookup(request=request, **kwargs) @@ -301,9 +316,11 @@ def _lookup_single_helper( request = _verify_protobuf_call( http, uri, - datastore_pb2.LookupRequest(), + datastore_pb2.LookupRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) if retry is not None: @@ -344,11 +361,7 @@ def test_api_lookup_single_key_hit_w_timeout(): def _lookup_multiple_helper( - found=0, - missing=0, - 
deferred=0, - retry=None, - timeout=None, + found=0, missing=0, deferred=0, retry=None, timeout=None, database=None ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -413,9 +426,11 @@ def _lookup_multiple_helper( request = _verify_protobuf_call( http, uri, - datastore_pb2.LookupRequest(), + datastore_pb2.LookupRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert list(request.keys) == [key_pb1._pb, key_pb2._pb] assert request.read_options == read_options._pb @@ -454,6 +469,7 @@ def _run_query_helper( found=0, retry=None, timeout=None, + database=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -517,9 +533,11 @@ def _run_query_helper( request = _verify_protobuf_call( http, uri, - datastore_pb2.RunQueryRequest(), + datastore_pb2.RunQueryRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert request.partition_id == partition_id._pb assert request.query == query_pb._pb @@ -558,9 +576,7 @@ def test_api_run_query_w_namespace_nonempty_result(): def _run_aggregation_query_helper( - transaction=None, - retry=None, - timeout=None, + transaction=None, retry=None, timeout=None, database=None ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -620,9 +636,11 @@ def _run_aggregation_query_helper( request = _verify_protobuf_call( http, uri, - datastore_pb2.RunAggregationQueryRequest(), + datastore_pb2.RunAggregationQueryRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert request.partition_id == partition_id._pb @@ -649,7 +667,7 @@ def test_api_run_aggregation_query_w_transaction(): _run_aggregation_query_helper(transaction=transaction) -def _begin_transaction_helper(options=None, retry=None, timeout=None): +def _begin_transaction_helper(options=None, retry=None, timeout=None, database=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -672,7 +690,7 @@ def _begin_transaction_helper(options=None, retry=None, timeout=None): # Make request. 
ds_api = _make_http_datastore_api(client) request = {"project_id": project} - + set_database_id_to_request(request, database) if options is not None: request["transaction_options"] = options @@ -687,40 +705,46 @@ def _begin_transaction_helper(options=None, retry=None, timeout=None): request = _verify_protobuf_call( http, uri, - datastore_pb2.BeginTransactionRequest(), + datastore_pb2.BeginTransactionRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) -def test_api_begin_transaction_wo_options(): - _begin_transaction_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_begin_transaction_wo_options(database_id): + _begin_transaction_helper(database=database_id) -def test_api_begin_transaction_w_options(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_begin_transaction_w_options(database_id): from google.cloud.datastore_v1.types import TransactionOptions read_only = TransactionOptions.ReadOnly._meta.pb() options = TransactionOptions(read_only=read_only) - _begin_transaction_helper(options=options) + _begin_transaction_helper(options=options, database=database_id) -def test_api_begin_transaction_w_retry(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_begin_transaction_w_retry(database_id): retry = mock.MagicMock() - _begin_transaction_helper(retry=retry) + _begin_transaction_helper(retry=retry, database=database_id) -def test_api_begin_transaction_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_begin_transaction_w_timeout(database_id): timeout = 5.0 - _begin_transaction_helper(timeout=timeout) + _begin_transaction_helper(timeout=timeout, database=database_id) -def _commit_helper(transaction=None, retry=None, timeout=None): +def _commit_helper(transaction=None, retry=None, timeout=None, database=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb project = "PROJECT" - key_pb = _make_key_pb(project) + key_pb = _make_key_pb(project, database=database) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() mutation = req_pb._pb.mutations.add() @@ -744,7 +768,7 @@ def _commit_helper(transaction=None, retry=None, timeout=None): ds_api = _make_http_datastore_api(client) request = {"project_id": project, "mutations": [mutation]} - + set_database_id_to_request(request, database) if transaction is not None: request["transaction"] = transaction mode = request["mode"] = rq_class.Mode.TRANSACTIONAL @@ -761,9 +785,11 @@ def _commit_helper(transaction=None, retry=None, timeout=None): request = _verify_protobuf_call( http, uri, - rq_class(), + rq_class(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert list(request.mutations) == [mutation] assert request.mode == mode @@ -774,27 +800,31 @@ def _commit_helper(transaction=None, retry=None, timeout=None): assert request.transaction == b"" -def test_api_commit_wo_transaction(): - _commit_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_commit_wo_transaction(database_id): + _commit_helper(database=database_id) -def test_api_commit_w_transaction(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_commit_w_transaction(database_id): transaction = b"xact" - _commit_helper(transaction=transaction) + _commit_helper(transaction=transaction, database=database_id) -def test_api_commit_w_retry(): 
+@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_commit_w_retry(database_id): retry = mock.MagicMock() - _commit_helper(retry=retry) + _commit_helper(retry=retry, database=database_id) -def test_api_commit_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_commit_w_timeout(database_id): timeout = 5.0 - _commit_helper(timeout=timeout) + _commit_helper(timeout=timeout, database=database_id) -def _rollback_helper(retry=None, timeout=None): +def _rollback_helper(retry=None, timeout=None, database=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -816,6 +846,7 @@ def _rollback_helper(retry=None, timeout=None): # Make request. ds_api = _make_http_datastore_api(client) request = {"project_id": project, "transaction": transaction} + set_database_id_to_request(request, database) kwargs = _retry_timeout_kw(retry, timeout, http) response = ds_api.rollback(request=request, **kwargs) @@ -827,28 +858,33 @@ def _rollback_helper(retry=None, timeout=None): request = _verify_protobuf_call( http, uri, - datastore_pb2.RollbackRequest(), + datastore_pb2.RollbackRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert request.transaction == transaction -def test_api_rollback_ok(): - _rollback_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_rollback_ok(database_id): + _rollback_helper(database=database_id) -def test_api_rollback_w_retry(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_rollback_w_retry(database_id): retry = mock.MagicMock() - _rollback_helper(retry=retry) + _rollback_helper(retry=retry, database=database_id) -def test_api_rollback_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_rollback_w_timeout(database_id): timeout = 5.0 - _rollback_helper(timeout=timeout) + _rollback_helper(timeout=timeout, database=database_id) -def _allocate_ids_helper(count=0, retry=None, timeout=None): +def _allocate_ids_helper(count=0, retry=None, timeout=None, database=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -857,9 +893,9 @@ def _allocate_ids_helper(count=0, retry=None, timeout=None): rsp_pb = datastore_pb2.AllocateIdsResponse() for i_count in range(count): - requested = _make_key_pb(project, id_=None) + requested = _make_key_pb(project, id_=None, database=database) before_key_pbs.append(requested) - allocated = _make_key_pb(project, id_=i_count) + allocated = _make_key_pb(project, id_=i_count, database=database) after_key_pbs.append(allocated) rsp_pb._pb.keys.add().CopyFrom(allocated._pb) @@ -876,6 +912,7 @@ def _allocate_ids_helper(count=0, retry=None, timeout=None): ds_api = _make_http_datastore_api(client) request = {"project_id": project, "keys": before_key_pbs} + set_database_id_to_request(request, database) kwargs = _retry_timeout_kw(retry, timeout, http) response = ds_api.allocate_ids(request=request, **kwargs) @@ -887,34 +924,40 @@ def _allocate_ids_helper(count=0, retry=None, timeout=None): request = _verify_protobuf_call( http, uri, - datastore_pb2.AllocateIdsRequest(), + datastore_pb2.AllocateIdsRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): assert key_before == key_after -def test_api_allocate_ids_empty(): - 
_allocate_ids_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_allocate_ids_empty(database_id): + _allocate_ids_helper(database=database_id) -def test_api_allocate_ids_non_empty(): - _allocate_ids_helper(count=2) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_allocate_ids_non_empty(database_id): + _allocate_ids_helper(count=2, database=database_id) -def test_api_allocate_ids_w_retry(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_allocate_ids_w_retry(database_id): retry = mock.MagicMock() - _allocate_ids_helper(retry=retry) + _allocate_ids_helper(retry=retry, database=database_id) -def test_api_allocate_ids_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_allocate_ids_w_timeout(database_id): timeout = 5.0 - _allocate_ids_helper(timeout=timeout) + _allocate_ids_helper(timeout=timeout, database=database_id) -def _reserve_ids_helper(count=0, retry=None, timeout=None): +def _reserve_ids_helper(count=0, retry=None, timeout=None, database=None): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -922,7 +965,7 @@ def _reserve_ids_helper(count=0, retry=None, timeout=None): rsp_pb = datastore_pb2.ReserveIdsResponse() for i_count in range(count): - requested = _make_key_pb(project, id_=i_count) + requested = _make_key_pb(project, id_=i_count, database=database) before_key_pbs.append(requested) http = _make_requests_session( @@ -938,6 +981,7 @@ def _reserve_ids_helper(count=0, retry=None, timeout=None): ds_api = _make_http_datastore_api(client) request = {"project_id": project, "keys": before_key_pbs} + set_database_id_to_request(request, database) kwargs = _retry_timeout_kw(retry, timeout, http) response = ds_api.reserve_ids(request=request, **kwargs) @@ -948,31 +992,59 @@ def _reserve_ids_helper(count=0, retry=None, timeout=None): request = _verify_protobuf_call( http, uri, - datastore_pb2.AllocateIdsRequest(), + datastore_pb2.AllocateIdsRequest(project_id=project), retry=retry, timeout=timeout, + project=project, + database=database, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): assert key_before == key_after -def test_api_reserve_ids_empty(): - _reserve_ids_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_reserve_ids_empty(database_id): + _reserve_ids_helper(database=database_id) -def test_api_reserve_ids_non_empty(): - _reserve_ids_helper(count=2) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_reserve_ids_non_empty(database_id): + _reserve_ids_helper(count=2, database=database_id) -def test_api_reserve_ids_w_retry(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_reserve_ids_w_retry(database_id): retry = mock.MagicMock() - _reserve_ids_helper(retry=retry) + _reserve_ids_helper(retry=retry, database=database_id) -def test_api_reserve_ids_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_api_reserve_ids_w_timeout(database_id): timeout = 5.0 - _reserve_ids_helper(timeout=timeout) + _reserve_ids_helper(timeout=timeout, database=database_id) + + +def test_update_headers_without_database_id(): + from google.cloud.datastore._http import _update_headers + + headers = {} + project_id = "someproject" + _update_headers(headers, project_id) + assert headers["x-goog-request-params"] == f"project_id={project_id}" + + +def test_update_headers_with_database_id(): + 
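+    # Editor's note: judging from the assertions in these two tests,
+    # _update_headers builds the x-goog-request-params routing header
+    # roughly as sketched here (an assumption, not the verbatim source):
+    #
+    #     def _update_headers(headers, project_id, database_id=None):
+    #         headers["x-goog-request-params"] = f"project_id={project_id}"
+    #         if database_id:
+    #             headers["x-goog-request-params"] += f"&database_id={database_id}"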
from google.cloud.datastore._http import _update_headers + + headers = {} + project_id = "someproject" + database_id = "somedb" + _update_headers(headers, project_id, database_id=database_id) + assert ( + headers["x-goog-request-params"] + == f"project_id={project_id}&database_id={database_id}" + ) def _make_http_datastore_api(*args, **kwargs): @@ -1002,13 +1074,13 @@ def _build_expected_url(api_base_url, project, method): return "/".join([api_base_url, API_VERSION, "projects", project + ":" + method]) -def _make_key_pb(project, id_=1234): +def _make_key_pb(project, id_=1234, database=None): from google.cloud.datastore.key import Key path_args = ("Kind",) if id_ is not None: path_args += (id_,) - return Key(*path_args, project=project).to_protobuf() + return Key(*path_args, project=project, database=database).to_protobuf() _USER_AGENT = "TESTING USER AGENT" @@ -1022,15 +1094,19 @@ def _make_client_info(user_agent=_USER_AGENT): return client_info -def _verify_protobuf_call(http, expected_url, pb, retry=None, timeout=None): +def _verify_protobuf_call( + http, expected_url, pb, retry=None, timeout=None, project=None, database=None +): from google.cloud import _http as connection_module + from google.cloud.datastore._http import _update_headers expected_headers = { "Content-Type": "application/x-protobuf", "User-Agent": _USER_AGENT, connection_module.CLIENT_INFO_HEADER: _USER_AGENT, + "x-goog-request-params": f"project_id={pb.project_id}", } - + _update_headers(expected_headers, project, database_id=database) if retry is not None: retry.assert_called_once_with(http.request) diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py index afa9dc536d62..ebfa9a3f67c3 100644 --- a/packages/google-cloud-datastore/tests/unit/test_aggregation.py +++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py @@ -16,6 +16,7 @@ import pytest from google.cloud.datastore.aggregation import CountAggregation, AggregationQuery +from google.cloud.datastore.helpers import set_database_id_to_request from tests.unit.test_query import _make_query, _make_client @@ -34,11 +35,44 @@ def test_count_aggregation_to_pb(): @pytest.fixture -def client(): - return _make_client() +def database_id(request): + return request.param -def test_pb_over_query(client): +@pytest.fixture +def client(database_id): + return _make_client(database=database_id) + + +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_project(client, database_id): + # Fallback to client + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + assert aggregation_query.project == _PROJECT + + # Fallback to query + query = _make_query(client, project="other-project") + aggregation_query = _make_aggregation_query(client=client, query=query) + assert aggregation_query.project == "other-project" + + +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_namespace(client, database_id): + # Fallback to client + client.namespace = "other-namespace" + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + assert aggregation_query.namespace == "other-namespace" + + # Fallback to query + query = _make_query(client, namespace="third-namespace") + aggregation_query = _make_aggregation_query(client=client, query=query) + assert aggregation_query.namespace == "third-namespace" + + +@pytest.mark.parametrize("database_id", [None, 
"somedb"], indirect=True) +def test_pb_over_query(client, database_id): from google.cloud.datastore.query import _pb_from_query query = _make_query(client) @@ -48,7 +82,8 @@ def test_pb_over_query(client): assert pb.aggregations == [] -def test_pb_over_query_with_count(client): +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_pb_over_query_with_count(client, database_id): from google.cloud.datastore.query import _pb_from_query query = _make_query(client) @@ -61,7 +96,8 @@ def test_pb_over_query_with_count(client): assert pb.aggregations[0] == CountAggregation(alias="total")._to_pb() -def test_pb_over_query_with_add_aggregation(client): +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_pb_over_query_with_add_aggregation(client, database_id): from google.cloud.datastore.query import _pb_from_query query = _make_query(client) @@ -74,7 +110,8 @@ def test_pb_over_query_with_add_aggregation(client): assert pb.aggregations[0] == CountAggregation(alias="total")._to_pb() -def test_pb_over_query_with_add_aggregations(client): +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_pb_over_query_with_add_aggregations(client, database_id): from google.cloud.datastore.query import _pb_from_query aggregations = [ @@ -93,7 +130,8 @@ def test_pb_over_query_with_add_aggregations(client): assert pb.aggregations[1] == CountAggregation(alias="all")._to_pb() -def test_query_fetch_defaults_w_client_attr(client): +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_query_fetch_defaults_w_client_attr(client, database_id): from google.cloud.datastore.aggregation import AggregationResultIterator query = _make_query(client) @@ -107,10 +145,11 @@ def test_query_fetch_defaults_w_client_attr(client): assert iterator._timeout is None -def test_query_fetch_w_explicit_client_w_retry_w_timeout(client): +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_query_fetch_w_explicit_client_w_retry_w_timeout(client, database_id): from google.cloud.datastore.aggregation import AggregationResultIterator - other_client = _make_client() + other_client = _make_client(database=database_id) query = _make_query(client) aggregation_query = _make_aggregation_query(client=client, query=query) retry = mock.Mock() @@ -127,10 +166,11 @@ def test_query_fetch_w_explicit_client_w_retry_w_timeout(client): assert iterator._timeout == timeout -def test_query_fetch_w_explicit_client_w_limit(client): +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_query_fetch_w_explicit_client_w_limit(client, database_id): from google.cloud.datastore.aggregation import AggregationResultIterator - other_client = _make_client() + other_client = _make_client(database=database_id) query = _make_query(client) aggregation_query = _make_aggregation_query(client=client, query=query) limit = 2 @@ -300,7 +340,7 @@ def test_iterator__next_page_no_more(): ds_api.run_aggregation_query.assert_not_called() -def _next_page_helper(txn_id=None, retry=None, timeout=None): +def _next_page_helper(txn_id=None, retry=None, timeout=None, database_id=None): from google.api_core import page_iterator from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -318,10 +358,12 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None): project = "prujekt" ds_api = _make_datastore_api_for_aggregation(result_1, result_2) if 
txn_id is None: - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) else: transaction = mock.Mock(id=txn_id, spec=["id"]) - client = _Client(project, datastore_api=ds_api, transaction=transaction) + client = _Client( + project, datastore_api=ds_api, transaction=transaction, database=database_id + ) query = _make_query(client) kwargs = {} @@ -350,14 +392,16 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None): aggregation_query = AggregationQuery(client=client, query=query) assert ds_api.run_aggregation_query.call_count == 2 + expected_request = { + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "aggregation_query": aggregation_query._to_pb(), + } + set_database_id_to_request(expected_request, database_id) expected_call = mock.call( - request={ - "project_id": project, - "partition_id": partition_id, - "read_options": read_options, - "aggregation_query": aggregation_query._to_pb(), - }, - **kwargs + request=expected_request, + **kwargs, ) assert ds_api.run_aggregation_query.call_args_list == ( [expected_call, expected_call] @@ -383,8 +427,17 @@ def test__item_to_aggregation_result(): class _Client(object): - def __init__(self, project, datastore_api=None, namespace=None, transaction=None): + def __init__( + self, + project, + datastore_api=None, + namespace=None, + transaction=None, + *, + database=None, + ): self.project = project + self.database = database self._datastore_api = datastore_api self.namespace = namespace self._transaction = transaction diff --git a/packages/google-cloud-datastore/tests/unit/test_batch.py b/packages/google-cloud-datastore/tests/unit/test_batch.py index 0e45ed97906d..67f5cff55f6c 100644 --- a/packages/google-cloud-datastore/tests/unit/test_batch.py +++ b/packages/google-cloud-datastore/tests/unit/test_batch.py @@ -18,6 +18,8 @@ import mock import pytest +from google.cloud.datastore.helpers import set_database_id_to_request + def _make_batch(client): from google.cloud.datastore.batch import Batch @@ -25,14 +27,16 @@ def _make_batch(client): return Batch(client) -def test_batch_ctor(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_ctor(database_id): project = "PROJECT" namespace = "NAMESPACE" - client = _Client(project, namespace=namespace) + client = _Client(project, database=database_id, namespace=namespace) batch = _make_batch(client) assert batch.project == project assert batch._client is client + assert batch.database == database_id assert batch.namespace == namespace assert batch._id is None assert batch._status == batch._INITIAL @@ -40,11 +44,12 @@ def test_batch_ctor(): assert batch._partial_key_entities == [] -def test_batch_current(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_current(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch1 = _make_batch(client) batch2 = _make_batch(client) @@ -68,19 +73,20 @@ def test_batch_current(): commit_method = client._datastore_api.commit assert commit_method.call_count == 2 mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": None, - } - ) - - -def test_batch_put_w_entity_wo_key(): + expected_request = { + "project_id": project, + "mode": mode, + "mutations": [], + 
"transaction": None, + } + set_database_id_to_request(expected_request, database_id) + commit_method.assert_called_with(request=expected_request) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_put_w_entity_wo_key(database_id): project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) entity = _Entity() @@ -89,37 +95,52 @@ def test_batch_put_w_entity_wo_key(): batch.put(entity) -def test_batch_put_w_wrong_status(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_put_w_wrong_status(database_id): project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) entity = _Entity() - entity.key = _Key(project=project) + entity.key = _Key(project=project, database=database_id) assert batch._status == batch._INITIAL with pytest.raises(ValueError): batch.put(entity) -def test_batch_put_w_key_wrong_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_put_w_key_wrong_project(database_id): + project = "PROJECT" + client = _Client(project, database=database_id) + batch = _make_batch(client) + entity = _Entity() + entity.key = _Key(project="OTHER", database=database_id) + + batch.begin() + with pytest.raises(ValueError): + batch.put(entity) + + +def test_batch_put_w_key_wrong_database(): project = "PROJECT" client = _Client(project) batch = _make_batch(client) entity = _Entity() - entity.key = _Key(project="OTHER") + entity.key = _Key(project=project, database="somedb") batch.begin() with pytest.raises(ValueError): batch.put(entity) -def test_batch_put_w_entity_w_partial_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_put_w_entity_w_partial_key(database_id): project = "PROJECT" properties = {"foo": "bar"} - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) entity = _Entity(properties) - key = entity.key = _Key(project) + key = entity.key = _Key(project, database=database_id) key._id = None batch.begin() @@ -130,14 +151,15 @@ def test_batch_put_w_entity_w_partial_key(): assert batch._partial_key_entities == [entity] -def test_batch_put_w_entity_w_completed_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_put_w_entity_w_completed_key(database_id): project = "PROJECT" properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []} - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) entity = _Entity(properties) entity.exclude_from_indexes = ("baz", "spam") - key = entity.key = _Key(project) + key = entity.key = _Key(project, database=database_id) batch.begin() batch.put(entity) @@ -158,11 +180,12 @@ def test_batch_put_w_entity_w_completed_key(): assert "frotz" in prop_dict -def test_batch_delete_w_wrong_status(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_delete_w_wrong_status(database_id): project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) - key = _Key(project=project) + key = _Key(project=project, database=database_id) key._id = None assert batch._status == batch._INITIAL @@ -171,11 +194,12 @@ def test_batch_delete_w_wrong_status(): batch.delete(key) -def test_batch_delete_w_partial_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_delete_w_partial_key(database_id): 
project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) - key = _Key(project=project) + key = _Key(project=project, database=database_id) key._id = None batch.begin() @@ -184,23 +208,36 @@ def test_batch_delete_w_partial_key(): batch.delete(key) -def test_batch_delete_w_key_wrong_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_delete_w_key_wrong_project(database_id): project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) - key = _Key(project="OTHER") + key = _Key(project="OTHER", database=database_id) batch.begin() + with pytest.raises(ValueError): + batch.delete(key) + + +def test_batch_delete_w_key_wrong_database(): + project = "PROJECT" + database = "DATABASE" + client = _Client(project, database=database) + batch = _make_batch(client) + key = _Key(project=project, database=None) + batch.begin() with pytest.raises(ValueError): batch.delete(key) -def test_batch_delete_w_completed_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_delete_w_completed_key(database_id): project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) - key = _Key(project) + key = _Key(project, database=database_id) batch.begin() batch.delete(key) @@ -209,9 +246,10 @@ def test_batch_delete_w_completed_key(): assert mutated_key == key._key -def test_batch_begin_w_wrong_status(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_begin_w_wrong_status(database_id): project = "PROJECT" - client = _Client(project, None) + client = _Client(project, database=database_id) batch = _make_batch(client) batch._status = batch._IN_PROGRESS @@ -219,9 +257,10 @@ def test_batch_begin_w_wrong_status(): batch.begin() -def test_batch_begin(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_begin(database_id): project = "PROJECT" - client = _Client(project, None) + client = _Client(project, database=database_id) batch = _make_batch(client) assert batch._status == batch._INITIAL @@ -230,9 +269,10 @@ def test_batch_begin(): assert batch._status == batch._IN_PROGRESS -def test_batch_rollback_w_wrong_status(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_rollback_w_wrong_status(database_id): project = "PROJECT" - client = _Client(project, None) + client = _Client(project, database=database_id) batch = _make_batch(client) assert batch._status == batch._INITIAL @@ -240,9 +280,10 @@ def test_batch_rollback_w_wrong_status(): batch.rollback() -def test_batch_rollback(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_rollback(database_id): project = "PROJECT" - client = _Client(project, None) + client = _Client(project, database=database_id) batch = _make_batch(client) batch.begin() assert batch._status == batch._IN_PROGRESS @@ -252,9 +293,10 @@ def test_batch_rollback(): assert batch._status == batch._ABORTED -def test_batch_commit_wrong_status(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_batch_commit_wrong_status(database_id): project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) batch = _make_batch(client) assert batch._status == batch._INITIAL @@ -262,11 +304,11 @@ def test_batch_commit_wrong_status(): batch.commit() -def _batch_commit_helper(timeout=None, retry=None): +def 
_batch_commit_helper(timeout=None, retry=None):
+def _batch_commit_helper(timeout=None, retry=None, database=None):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
     project = "PROJECT"
-    client = _Client(project)
+    client = _Client(project, database=database)
     batch = _make_batch(client)
 
     assert batch._status == batch._INITIAL
@@ -286,38 +328,41 @@ def _batch_commit_helper(timeout=None, retry=None):
 
     commit_method = client._datastore_api.commit
     mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-    commit_method.assert_called_with(
-        request={
-            "project_id": project,
-            "mode": mode,
-            "mutations": [],
-            "transaction": None,
-        },
-        **kwargs
-    )
+    expected_request = {
+        "project_id": project,
+        "mode": mode,
+        "mutations": [],
+        "transaction": None,
+    }
+    set_database_id_to_request(expected_request, database)
+    commit_method.assert_called_with(request=expected_request, **kwargs)
 
 
-def test_batch_commit():
-    _batch_commit_helper()
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_batch_commit(database_id):
+    _batch_commit_helper(database=database_id)
 
 
-def test_batch_commit_w_timeout():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_batch_commit_w_timeout(database_id):
     timeout = 100000
-    _batch_commit_helper(timeout=timeout)
+    _batch_commit_helper(timeout=timeout, database=database_id)
 
 
-def test_batch_commit_w_retry():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
def test_batch_commit_w_retry(database_id):
     retry = mock.Mock(spec=[])
-    _batch_commit_helper(retry=retry)
+    _batch_commit_helper(retry=retry, database=database_id)
 
 
-def test_batch_commit_w_partial_key_entity():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_batch_commit_w_partial_key_entity(database_id):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
     project = "PROJECT"
     new_id = 1234
     ds_api = _make_datastore_api(new_id)
-    client = _Client(project, datastore_api=ds_api)
+    client = _Client(project, datastore_api=ds_api, database=database_id)
     batch = _make_batch(client)
     entity = _Entity({})
-    key = entity.key = _Key(project)
+    key = entity.key = _Key(project, database=database_id)
@@ -332,27 +377,29 @@ def test_batch_commit_w_partial_key_entity():
     assert batch._status == batch._FINISHED
 
     mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-    ds_api.commit.assert_called_once_with(
-        request={
-            "project_id": project,
-            "mode": mode,
-            "mutations": [],
-            "transaction": None,
-        }
-    )
+    expected_request = {
+        "project_id": project,
+        "mode": mode,
+        "mutations": [],
+        "transaction": None,
+    }
+    set_database_id_to_request(expected_request, database_id)
+    ds_api.commit.assert_called_once_with(request=expected_request)
+
     assert not entity.key.is_partial
     assert entity.key._id == new_id
 
 
-def test_batch_as_context_mgr_wo_error():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_batch_as_context_mgr_wo_error(database_id):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
     project = "PROJECT"
     properties = {"foo": "bar"}
     entity = _Entity(properties)
-    key = entity.key = _Key(project)
+    key = entity.key = _Key(project, database=database_id)
 
-    client = _Client(project)
+    client = _Client(project, database=database_id)
     assert list(client._batches) == []
 
     with _make_batch(client) as batch:
@@ -366,27 +413,28 @@ def test_batch_as_context_mgr_wo_error():
     commit_method = client._datastore_api.commit
     mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-    commit_method.assert_called_with(
-        request={
-            "project_id": project,
-            "mode": mode,
-            "mutations": batch.mutations,
-            "transaction": None,
-        }
-    )
-
-
-def test_batch_as_context_mgr_nested():
+    expected_request = {
+        "project_id": project,
+        "mode": mode,
+        "mutations": batch.mutations,
+        "transaction": None,
+    }
+    set_database_id_to_request(expected_request, database_id)
+    commit_method.assert_called_with(request=expected_request)
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_batch_as_context_mgr_nested(database_id):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
     project = "PROJECT"
     properties = {"foo": "bar"}
     entity1 = _Entity(properties)
-    key1 = entity1.key = _Key(project)
+    key1 = entity1.key = _Key(project, database=database_id)
 
     entity2 = _Entity(properties)
-    key2 = entity2.key = _Key(project)
+    key2 = entity2.key = _Key(project, database=database_id)
 
-    client = _Client(project)
+    client = _Client(project, database=database_id)
     assert list(client._batches) == []
 
     with _make_batch(client) as batch1:
@@ -411,31 +459,33 @@ def test_batch_as_context_mgr_nested():
     assert commit_method.call_count == 2
 
     mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-    commit_method.assert_called_with(
-        request={
-            "project_id": project,
-            "mode": mode,
-            "mutations": batch1.mutations,
-            "transaction": None,
-        }
-    )
-    commit_method.assert_called_with(
-        request={
-            "project_id": project,
-            "mode": mode,
-            "mutations": batch2.mutations,
-            "transaction": None,
-        }
-    )
+    expected_request_1 = {
+        "project_id": project,
+        "mode": mode,
+        "mutations": batch1.mutations,
+        "transaction": None,
+    }
+    expected_request_2 = {
+        "project_id": project,
+        "mode": mode,
+        "mutations": batch2.mutations,
+        "transaction": None,
+    }
+    set_database_id_to_request(expected_request_1, database_id)
+    set_database_id_to_request(expected_request_2, database_id)
+
+    commit_method.assert_called_with(request=expected_request_1)
+    commit_method.assert_called_with(request=expected_request_2)
+
+
-def test_batch_as_context_mgr_w_error():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_batch_as_context_mgr_w_error(database_id):
     project = "PROJECT"
     properties = {"foo": "bar"}
     entity = _Entity(properties)
-    key = entity.key = _Key(project)
+    key = entity.key = _Key(project, database=database_id)
 
-    client = _Client(project)
+    client = _Client(project, database=database_id)
     assert list(client._batches) == []
 
     try:
@@ -511,8 +561,9 @@ class _Key(object):
     _id = 1234
     _stored = None
 
-    def __init__(self, project):
+    def __init__(self, project, database=None):
         self.project = project
+        self.database = database
 
     @property
     def is_partial(self):
@@ -534,18 +585,19 @@ def to_protobuf(self):
 
     def completed_key(self, new_id):
         assert self.is_partial
-        new_key = self.__class__(self.project)
+        new_key = self.__class__(self.project, self.database)
         new_key._id = new_id
         return new_key
 
 
 class _Client(object):
-    def __init__(self, project, datastore_api=None, namespace=None):
+    def __init__(self, project, datastore_api=None, namespace=None, database=None):
         self.project = project
         if datastore_api is None:
             datastore_api = _make_datastore_api()
         self._datastore_api = datastore_api
         self.namespace = namespace
+        self.database = database
         self._batches = []
 
     def _push_batch(self, batch):
diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py
index 3e35f74e4d17..119bab79bd20 100644
--- a/packages/google-cloud-datastore/tests/unit/test_client.py
+++ b/packages/google-cloud-datastore/tests/unit/test_client.py
@@ -18,7 +18,10 @@
 import mock
 import pytest
 
+from 
google.cloud.datastore.helpers import set_database_id_to_request + PROJECT = "dummy-project-123" +DATABASE = "dummy-database-123" def test__get_gcd_project_wo_value_set(): @@ -98,11 +101,13 @@ def _make_client( client_options=None, _http=None, _use_grpc=None, + database="", ): from google.cloud.datastore.client import Client return Client( project=project, + database=database, namespace=namespace, credentials=credentials, client_info=client_info, @@ -123,7 +128,8 @@ def test_client_ctor_w_project_no_environ(): _make_client(project=None) -def test_client_ctor_w_implicit_inputs(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_ctor_w_implicit_inputs(database_id): from google.cloud.datastore.client import Client from google.cloud.datastore.client import _CLIENT_INFO from google.cloud.datastore.client import _DATASTORE_BASE_URL @@ -139,9 +145,10 @@ def test_client_ctor_w_implicit_inputs(): with patch1 as _determine_default_project: with patch2 as default: - client = Client() + client = Client(database=database_id) assert client.project == other + assert client.database == database_id assert client.namespace is None assert client._credentials is creds assert client._client_info is _CLIENT_INFO @@ -158,10 +165,12 @@ def test_client_ctor_w_implicit_inputs(): _determine_default_project.assert_called_once_with(None) -def test_client_ctor_w_explicit_inputs(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_ctor_w_explicit_inputs(database_id): from google.api_core.client_options import ClientOptions other = "other" + database = "database" namespace = "namespace" creds = _make_credentials() client_info = mock.Mock() @@ -169,6 +178,7 @@ def test_client_ctor_w_explicit_inputs(): http = object() client = _make_client( project=other, + database=database, namespace=namespace, credentials=creds, client_info=client_info, @@ -176,6 +186,7 @@ def test_client_ctor_w_explicit_inputs(): _http=http, ) assert client.project == other + assert client.database == database assert client.namespace == namespace assert client._credentials is creds assert client._client_info is client_info @@ -185,7 +196,8 @@ def test_client_ctor_w_explicit_inputs(): assert list(client._batch_stack) == [] -def test_client_ctor_use_grpc_default(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_ctor_use_grpc_default(database_id): import google.cloud.datastore.client as MUT project = "PROJECT" @@ -193,20 +205,32 @@ def test_client_ctor_use_grpc_default(): http = object() with mock.patch.object(MUT, "_USE_GRPC", new=True): - client1 = _make_client(project=PROJECT, credentials=creds, _http=http) + client1 = _make_client( + project=PROJECT, credentials=creds, _http=http, database=database_id + ) assert client1._use_grpc # Explicitly over-ride the environment. client2 = _make_client( - project=project, credentials=creds, _http=http, _use_grpc=False + project=project, + credentials=creds, + _http=http, + _use_grpc=False, + database=database_id, ) assert not client2._use_grpc with mock.patch.object(MUT, "_USE_GRPC", new=False): - client3 = _make_client(project=PROJECT, credentials=creds, _http=http) + client3 = _make_client( + project=PROJECT, credentials=creds, _http=http, database=database_id + ) assert not client3._use_grpc # Explicitly over-ride the environment. 
client4 = _make_client(
-            project=project, credentials=creds, _http=http, _use_grpc=True
+            project=project,
+            credentials=creds,
+            _http=http,
+            _use_grpc=True,
+            database=database_id,
         )
         assert client4._use_grpc
 
@@ -407,12 +431,13 @@ def test_client_get_multi_no_keys():
     ds_api.lookup.assert_not_called()
 
 
-def test_client_get_multi_miss():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_client_get_multi_miss(database_id):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
     from google.cloud.datastore.key import Key
 
     creds = _make_credentials()
-    client = _make_client(credentials=creds)
+    client = _make_client(credentials=creds, database=database_id)
     ds_api = _make_datastore_api()
     client._datastore_api_internal = ds_api
 
     key = Key("Kind", 1234, project=PROJECT)
     results = client.get_multi([key])
     assert results == []
 
     read_options = datastore_pb2.ReadOptions()
-    ds_api.lookup.assert_called_once_with(
-        request={
-            "project_id": PROJECT,
-            "keys": [key.to_protobuf()],
-            "read_options": read_options,
-        }
-    )
+    expected_request = {
+        "project_id": PROJECT,
+        "keys": [key.to_protobuf()],
+        "read_options": read_options,
+    }
+    set_database_id_to_request(expected_request, database_id)
+    ds_api.lookup.assert_called_once_with(request=expected_request)
 
 
-def test_client_get_multi_miss_w_missing():
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_client_get_multi_miss_w_missing(database_id):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
     from google.cloud.datastore_v1.types import entity as entity_pb2
     from google.cloud.datastore.key import Key
@@ -441,18 +467,19 @@ def test_client_get_multi_miss_w_missing():
     # Make a missing entity pb to be returned from mock backend.
     missed = entity_pb2.Entity()
     missed.key.partition_id.project_id = PROJECT
+    if database_id is not None:
+        missed.key.partition_id.database_id = database_id
     path_element = missed._pb.key.path.add()
     path_element.kind = KIND
     path_element.id = ID
 
     creds = _make_credentials()
-    client = _make_client(credentials=creds)
+    client = _make_client(credentials=creds, database=database_id)
 
     # Set missing entity on mock connection.
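+    # Editor's note: a Datastore lookup response sorts results into three
+    # buckets -- found, missing, and deferred. The mock below populates only
+    # "missing", so get_multi() returns no entities and surfaces the missed
+    # key through the caller-supplied `missing` list.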
lookup_response = _make_lookup_response(missing=[missed._pb]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api - key = Key(KIND, ID, project=PROJECT) + key = Key(KIND, ID, project=PROJECT, database=database_id) missing = [] entities = client.get_multi([key], missing=missing) assert entities == [] @@ -460,9 +487,13 @@ def test_client_get_multi_miss_w_missing(): assert [missed.key.to_protobuf() for missed in missing] == [key_pb._pb] read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={"project_id": PROJECT, "keys": [key_pb], "read_options": read_options} - ) + expected_request = { + "project_id": PROJECT, + "keys": [key_pb], + "read_options": read_options, + } + set_database_id_to_request(expected_request, database_id) + ds_api.lookup.assert_called_once_with(request=expected_request) def test_client_get_multi_w_missing_non_empty(): @@ -489,16 +520,17 @@ def test_client_get_multi_w_deferred_non_empty(): client.get_multi([key], deferred=deferred) -def test_client_get_multi_miss_w_deferred(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_miss_w_deferred(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.key import Key - key = Key("Kind", 1234, project=PROJECT) + key = Key("Kind", 1234, project=PROJECT, database=database_id) key_pb = key.to_protobuf() # Set deferred entity on mock connection. creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) lookup_response = _make_lookup_response(deferred=[key_pb]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api @@ -507,22 +539,27 @@ def test_client_get_multi_miss_w_deferred(): entities = client.get_multi([key], deferred=deferred) assert entities == [] assert [def_key.to_protobuf() for def_key in deferred] == [key_pb] - read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={"project_id": PROJECT, "keys": [key_pb], "read_options": read_options} - ) + expected_request = { + "project_id": PROJECT, + "keys": [key_pb], + "read_options": read_options, + } + set_database_id_to_request(expected_request, database_id) + + ds_api.lookup.assert_called_once_with(request=expected_request) -def test_client_get_multi_w_deferred_from_backend_but_not_passed(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_w_deferred_from_backend_but_not_passed(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key - key1 = Key("Kind", project=PROJECT) + key1 = Key("Kind", project=PROJECT, database=database_id) key1_pb = key1.to_protobuf() - key2 = Key("Kind", 2345, project=PROJECT) + key2 = Key("Kind", 2345, project=PROJECT, database=database_id) key2_pb = key2.to_protobuf() entity1_pb = entity_pb2.Entity() @@ -531,7 +568,7 @@ def test_client_get_multi_w_deferred_from_backend_but_not_passed(): entity2_pb._pb.key.CopyFrom(key2_pb._pb) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) # Mock up two separate requests. Using an iterable as side_effect # allows multiple return values. 
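+    # Editor's note: mock.Mock(side_effect=[resp1, resp2]) returns resp1 from
+    # the first call and resp2 from the second, letting this test model a
+    # first lookup that defers key2 and a follow-up lookup that resolves it.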
lookup_response1 = _make_lookup_response(results=[entity1_pb], deferred=[key2_pb]) @@ -549,32 +586,39 @@ def test_client_get_multi_w_deferred_from_backend_but_not_passed(): assert isinstance(found[0], Entity) assert found[0].key.path == key1.path assert found[0].key.project == key1.project + assert found[0].key.database == key1.database assert isinstance(found[1], Entity) assert found[1].key.path == key2.path assert found[1].key.project == key2.project + assert found[1].key.database == key2.database assert ds_api.lookup.call_count == 2 read_options = datastore_pb2.ReadOptions() + expected_request_1 = { + "project_id": PROJECT, + "keys": [key2_pb], + "read_options": read_options, + } + set_database_id_to_request(expected_request_1, database_id) ds_api.lookup.assert_any_call( - request={ - "project_id": PROJECT, - "keys": [key2_pb], - "read_options": read_options, - }, + request=expected_request_1, ) + expected_request_2 = { + "project_id": PROJECT, + "keys": [key1_pb, key2_pb], + "read_options": read_options, + } + set_database_id_to_request(expected_request_2, database_id) ds_api.lookup.assert_any_call( - request={ - "project_id": PROJECT, - "keys": [key1_pb, key2_pb], - "read_options": read_options, - }, + request=expected_request_2, ) -def test_client_get_multi_hit_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_w_retry_w_timeout(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.key import Key @@ -585,7 +629,7 @@ def test_client_get_multi_hit_w_retry_w_timeout(): timeout = 100000 # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo", database=database_id) # Make a connection to return the entity pb. creds = _make_credentials() @@ -610,6 +654,7 @@ def test_client_get_multi_hit_w_retry_w_timeout(): ds_api.lookup.assert_called_once_with( request={ "project_id": PROJECT, + "database_id": "", "keys": [key.to_protobuf()], "read_options": read_options, }, @@ -618,7 +663,8 @@ def test_client_get_multi_hit_w_retry_w_timeout(): ) -def test_client_get_multi_hit_w_transaction(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_w_transaction(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.key import Key @@ -628,16 +674,16 @@ def test_client_get_multi_hit_w_transaction(): path = [{"kind": kind, "id": id_}] # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo", database=database_id) # Make a connection to return the entity pb. 
creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) lookup_response = _make_lookup_response(results=[entity_pb]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api - key = Key(kind, id_, project=PROJECT) + key = Key(kind, id_, project=PROJECT, database=database_id) txn = client.transaction() txn._id = txn_id (result,) = client.get_multi([key], transaction=txn) @@ -651,16 +697,17 @@ def test_client_get_multi_hit_w_transaction(): assert result["foo"] == "Foo" read_options = datastore_pb2.ReadOptions(transaction=txn_id) - ds_api.lookup.assert_called_once_with( - request={ - "project_id": PROJECT, - "keys": [key.to_protobuf()], - "read_options": read_options, - } - ) + expected_request = { + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": read_options, + } + set_database_id_to_request(expected_request, database_id) + ds_api.lookup.assert_called_once_with(request=expected_request) -def test_client_get_multi_hit_w_read_time(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_w_read_time(database_id): from datetime import datetime from google.cloud.datastore.key import Key @@ -674,16 +721,16 @@ def test_client_get_multi_hit_w_read_time(): path = [{"kind": kind, "id": id_}] # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo", database=database_id) # Make a connection to return the entity pb. creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) lookup_response = _make_lookup_response(results=[entity_pb]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api - key = Key(kind, id_, project=PROJECT) + key = Key(kind, id_, project=PROJECT, database=database_id) (result,) = client.get_multi([key], read_time=read_time) new_key = result.key @@ -695,16 +742,17 @@ def test_client_get_multi_hit_w_read_time(): assert result["foo"] == "Foo" read_options = datastore_pb2.ReadOptions(read_time=read_time_pb) - ds_api.lookup.assert_called_once_with( - request={ - "project_id": PROJECT, - "keys": [key.to_protobuf()], - "read_options": read_options, - } - ) + expected_request = { + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": read_options, + } + set_database_id_to_request(expected_request, database_id) + ds_api.lookup.assert_called_once_with(request=expected_request) -def test_client_get_multi_hit_multiple_keys_same_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_multiple_keys_same_project(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore.key import Key @@ -713,18 +761,18 @@ def test_client_get_multi_hit_multiple_keys_same_project(): id2 = 2345 # Make a found entity pb to be returned from mock backend. - entity_pb1 = _make_entity_pb(PROJECT, kind, id1) - entity_pb2 = _make_entity_pb(PROJECT, kind, id2) + entity_pb1 = _make_entity_pb(PROJECT, kind, id1, database=database_id) + entity_pb2 = _make_entity_pb(PROJECT, kind, id2, database=database_id) # Make a connection to return the entity pbs. 
creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) lookup_response = _make_lookup_response(results=[entity_pb1, entity_pb2]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api - key1 = Key(kind, id1, project=PROJECT) - key2 = Key(kind, id2, project=PROJECT) + key1 = Key(kind, id1, project=PROJECT, database=database_id) + key2 = Key(kind, id2, project=PROJECT, database=database_id) retrieved1, retrieved2 = client.get_multi([key1, key2]) # Check values match. @@ -734,48 +782,50 @@ def test_client_get_multi_hit_multiple_keys_same_project(): assert dict(retrieved2) == {} read_options = datastore_pb2.ReadOptions() - ds_api.lookup.assert_called_once_with( - request={ - "project_id": PROJECT, - "keys": [key1.to_protobuf(), key2.to_protobuf()], - "read_options": read_options, - } - ) + expected_request = { + "project_id": PROJECT, + "keys": [key1.to_protobuf(), key2.to_protobuf()], + "read_options": read_options, + } + set_database_id_to_request(expected_request, database_id) + ds_api.lookup.assert_called_once_with(request=expected_request) -def test_client_get_multi_hit_multiple_keys_different_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_multiple_keys_different_project(database_id): from google.cloud.datastore.key import Key PROJECT1 = "PROJECT" PROJECT2 = "PROJECT-ALT" - key1 = Key("KIND", 1234, project=PROJECT1) - key2 = Key("KIND", 1234, project=PROJECT2) + key1 = Key("KIND", 1234, project=PROJECT1, database=database_id) + key2 = Key("KIND", 1234, project=PROJECT2, database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): client.get_multi([key1, key2]) -def test_client_get_multi_max_loops(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_max_loops(database_id): from google.cloud.datastore.key import Key kind = "Kind" id_ = 1234 # Make a found entity pb to be returned from mock backend. - entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo") + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo", database=database_id) # Make a connection to return the entity pb. 
creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) lookup_response = _make_lookup_response(results=[entity_pb]) ds_api = _make_datastore_api(lookup_response=lookup_response) client._datastore_api_internal = ds_api - key = Key(kind, id_, project=PROJECT) + key = Key(kind, id_, project=PROJECT, database=database_id) deferred = [] missing = [] @@ -791,10 +841,11 @@ def test_client_get_multi_max_loops(): ds_api.lookup.assert_not_called() -def test_client_put(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_put(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) put_multi = client.put_multi = mock.Mock() entity = mock.Mock() @@ -803,10 +854,11 @@ def test_client_put(): put_multi.assert_called_once_with(entities=[entity], retry=None, timeout=None) -def test_client_put_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_put_w_retry_w_timeout(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) put_multi = client.put_multi = mock.Mock() entity = mock.Mock() retry = mock.Mock() @@ -817,32 +869,35 @@ def test_client_put_w_retry_w_timeout(): put_multi.assert_called_once_with(entities=[entity], retry=retry, timeout=timeout) -def test_client_put_multi_no_entities(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_put_multi_no_entities(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) assert client.put_multi([]) is None -def test_client_put_multi_w_single_empty_entity(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_put_multi_w_single_empty_entity(database_id): # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649 from google.cloud.datastore.entity import Entity creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): client.put_multi(Entity()) -def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 entity = _Entity(foo="bar") - key = entity.key = _Key(_Key.kind, None) + key = entity.key = _Key(_Key.kind, None, database=database_id) retry = mock.Mock() timeout = 100000 creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) key_pb = _make_key(234) ds_api = _make_datastore_api(key_pb) client._datastore_api_internal = ds_api @@ -850,13 +905,15 @@ def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): result = client.put_multi([entity], retry=retry, timeout=timeout) assert result is None + expected_request = { + "project_id": PROJECT, + "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, + "mutations": mock.ANY, + "transaction": None, + } + set_database_id_to_request(expected_request, database_id) ds_api.commit.assert_called_once_with( - request={ - "project_id": PROJECT, - "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, - "mutations": 
mock.ANY, - "transaction": None, - }, + request=expected_request, retry=retry, timeout=timeout, ) @@ -872,11 +929,12 @@ def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): assert value_pb.string_value == "bar" -def test_client_put_multi_existing_batch_w_completed_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_put_multi_existing_batch_w_completed_key(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) entity = _Entity(foo="bar") - key = entity.key = _Key() + key = entity.key = _Key(database=database_id) with _NoCommitBatch(client) as CURR_BATCH: result = client.put_multi([entity]) @@ -916,9 +974,10 @@ def test_client_delete_w_retry_w_timeout(): delete_multi.assert_called_once_with(keys=[key], retry=retry, timeout=timeout) -def test_client_delete_multi_no_keys(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_delete_multi_no_keys(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) client._datastore_api_internal = _make_datastore_api() result = client.delete_multi([]) @@ -926,28 +985,31 @@ def test_client_delete_multi_no_keys(): client._datastore_api_internal.commit.assert_not_called() -def test_client_delete_multi_no_batch_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_delete_multi_no_batch_w_retry_w_timeout(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 - key = _Key() + key = _Key(database=database_id) retry = mock.Mock() timeout = 100000 creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) ds_api = _make_datastore_api() client._datastore_api_internal = ds_api result = client.delete_multi([key], retry=retry, timeout=timeout) assert result is None + expected_request = { + "project_id": PROJECT, + "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, + "mutations": mock.ANY, + "transaction": None, + } + set_database_id_to_request(expected_request, database_id) ds_api.commit.assert_called_once_with( - request={ - "project_id": PROJECT, - "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, - "mutations": mock.ANY, - "transaction": None, - }, + request=expected_request, retry=retry, timeout=timeout, ) @@ -957,12 +1019,13 @@ def test_client_delete_multi_no_batch_w_retry_w_timeout(): assert mutated_key == key.to_protobuf() -def test_client_delete_multi_w_existing_batch(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_delete_multi_w_existing_batch(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) client._datastore_api_internal = _make_datastore_api() - key = _Key() + key = _Key(database=database_id) with _NoCommitBatch(client) as CURR_BATCH: result = client.delete_multi([key]) @@ -973,12 +1036,13 @@ def test_client_delete_multi_w_existing_batch(): client._datastore_api_internal.commit.assert_not_called() -def test_client_delete_multi_w_existing_transaction(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_delete_multi_w_existing_transaction(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) 
client._datastore_api_internal = _make_datastore_api() - key = _Key() + key = _Key(database=database_id) with _NoCommitTransaction(client) as CURR_XACT: result = client.delete_multi([key]) @@ -989,14 +1053,15 @@ def test_client_delete_multi_w_existing_transaction(): client._datastore_api_internal.commit.assert_not_called() -def test_client_delete_multi_w_existing_transaction_entity(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_delete_multi_w_existing_transaction_entity(database_id): from google.cloud.datastore.entity import Entity creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) client._datastore_api_internal = _make_datastore_api() - key = _Key() + key = _Key(database=database_id) entity = Entity(key=key) with _NoCommitTransaction(client) as CURR_XACT: @@ -1008,22 +1073,24 @@ def test_client_delete_multi_w_existing_transaction_entity(): client._datastore_api_internal.commit.assert_not_called() -def test_client_allocate_ids_w_completed_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_allocate_ids_w_completed_key(database_id): creds = _make_credentials() client = _make_client(credentials=creds) - complete_key = _Key() + complete_key = _Key(database=database_id) with pytest.raises(ValueError): client.allocate_ids(complete_key, 2) -def test_client_allocate_ids_w_partial_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_allocate_ids_w_partial_key(database_id): num_ids = 2 - incomplete_key = _Key(_Key.kind, None) + incomplete_key = _Key(_Key.kind, None, database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) alloc_ids = mock.Mock(return_value=allocated, spec=[]) ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) @@ -1035,20 +1102,21 @@ def test_client_allocate_ids_w_partial_key(): assert [key.id for key in result] == list(range(num_ids)) expected_keys = [incomplete_key.to_protobuf()] * num_ids - alloc_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys} - ) + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) + alloc_ids.assert_called_once_with(request=expected_request) -def test_client_allocate_ids_w_partial_key_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_allocate_ids_w_partial_key_w_retry_w_timeout(database_id): num_ids = 2 - incomplete_key = _Key(_Key.kind, None) + incomplete_key = _Key(_Key.kind, None, database=database_id) retry = mock.Mock() timeout = 100000 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"]) alloc_ids = mock.Mock(return_value=allocated, spec=[]) ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"]) @@ -1060,17 +1128,20 @@ def test_client_allocate_ids_w_partial_key_w_retry_w_timeout(): assert [key.id for key in result] == list(range(num_ids)) expected_keys = [incomplete_key.to_protobuf()] * num_ids + expected_request = {"project_id": PROJECT, "keys": expected_keys} + 
set_database_id_to_request(expected_request, database_id) alloc_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys}, + request=expected_request, retry=retry, timeout=timeout, ) -def test_client_reserve_ids_sequential_w_completed_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_sequential_w_completed_key(database_id): num_ids = 2 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) complete_key = _Key() reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) @@ -1083,19 +1154,20 @@ def test_client_reserve_ids_sequential_w_completed_key(): _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) ) expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys} - ) + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) + reserve_ids.assert_called_once_with(request=expected_request) -def test_client_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(database_id): num_ids = 2 retry = mock.Mock() timeout = 100000 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) - complete_key = _Key() + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) + complete_key = _Key(database=database_id) assert not complete_key.is_partial reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) @@ -1107,17 +1179,20 @@ def test_client_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(): _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) ) expected_keys = [key.to_protobuf() for key in reserved_keys] + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys}, + request=expected_request, retry=retry, timeout=timeout, ) -def test_client_reserve_ids_sequential_w_completed_key_w_ancestor(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_sequential_w_completed_key_w_ancestor(database_id): num_ids = 2 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) @@ -1127,38 +1202,41 @@ def test_client_reserve_ids_sequential_w_completed_key_w_ancestor(): client.reserve_ids_sequential(complete_key, num_ids) reserved_keys = ( - _Key("PARENT", "SINGLETON", _Key.kind, id) + _Key("PARENT", "SINGLETON", _Key.kind, id, database=database_id) for id in range(complete_key.id, complete_key.id + num_ids) ) expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys} - ) + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) + 
reserve_ids.assert_called_once_with(request=expected_request) -def test_client_reserve_ids_sequential_w_partial_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_sequential_w_partial_key(database_id): num_ids = 2 - incomplete_key = _Key(_Key.kind, None) + incomplete_key = _Key(_Key.kind, None, database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): client.reserve_ids_sequential(incomplete_key, num_ids) -def test_client_reserve_ids_sequential_w_wrong_num_ids(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_sequential_w_wrong_num_ids(database_id): num_ids = "2" - complete_key = _Key() + complete_key = _Key(database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): client.reserve_ids_sequential(complete_key, num_ids) -def test_client_reserve_ids_sequential_w_non_numeric_key_name(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_sequential_w_non_numeric_key_name(database_id): num_ids = 2 - complete_key = _Key(_Key.kind, "batman") + complete_key = _Key(_Key.kind, "batman", database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): client.reserve_ids_sequential(complete_key, num_ids) @@ -1168,13 +1246,14 @@ def _assert_reserve_ids_warning(warned): assert "Client.reserve_ids is deprecated." in str(warned[0].message) -def test_client_reserve_ids_w_partial_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_w_partial_key(database_id): import warnings num_ids = 2 incomplete_key = _Key(_Key.kind, None) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): with warnings.catch_warnings(record=True) as warned: client.reserve_ids(incomplete_key, num_ids) @@ -1182,13 +1261,14 @@ def test_client_reserve_ids_w_partial_key(): _assert_reserve_ids_warning(warned) -def test_client_reserve_ids_w_wrong_num_ids(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_w_wrong_num_ids(database_id): import warnings num_ids = "2" - complete_key = _Key() + complete_key = _Key(database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): with warnings.catch_warnings(record=True) as warned: client.reserve_ids(complete_key, num_ids) @@ -1196,13 +1276,14 @@ def test_client_reserve_ids_w_wrong_num_ids(): _assert_reserve_ids_warning(warned) -def test_client_reserve_ids_w_non_numeric_key_name(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_w_non_numeric_key_name(database_id): import warnings num_ids = 2 - complete_key = _Key(_Key.kind, "batman") + complete_key = _Key(_Key.kind, "batman", database=database_id) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): with warnings.catch_warnings(record=True) as warned: 
client.reserve_ids(complete_key, num_ids) @@ -1210,13 +1291,14 @@ def test_client_reserve_ids_w_non_numeric_key_name(): _assert_reserve_ids_warning(warned) -def test_client_reserve_ids_w_completed_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_w_completed_key(database_id): import warnings num_ids = 2 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) - complete_key = _Key() + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) + complete_key = _Key(database=database_id) reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) client._datastore_api_internal = ds_api @@ -1226,16 +1308,18 @@ def test_client_reserve_ids_w_completed_key(): client.reserve_ids(complete_key, num_ids) reserved_keys = ( - _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) + _Key(_Key.kind, id, database=database_id) + for id in range(complete_key.id, complete_key.id + num_ids) ) expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys} - ) + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) + reserve_ids.assert_called_once_with(request=expected_request) _assert_reserve_ids_warning(warned) -def test_client_reserve_ids_w_completed_key_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_w_completed_key_w_retry_w_timeout(database_id): import warnings num_ids = 2 @@ -1243,8 +1327,8 @@ def test_client_reserve_ids_w_completed_key_w_retry_w_timeout(): timeout = 100000 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) - complete_key = _Key() + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) + complete_key = _Key(database=database_id) assert not complete_key.is_partial reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) @@ -1254,24 +1338,28 @@ def test_client_reserve_ids_w_completed_key_w_retry_w_timeout(): client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout) reserved_keys = ( - _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids) + _Key(_Key.kind, id, database=database_id) + for id in range(complete_key.id, complete_key.id + num_ids) ) expected_keys = [key.to_protobuf() for key in reserved_keys] + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys}, + request=expected_request, retry=retry, timeout=timeout, ) _assert_reserve_ids_warning(warned) -def test_client_reserve_ids_w_completed_key_w_ancestor(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_w_completed_key_w_ancestor(database_id): import warnings num_ids = 2 creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) - complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234) + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) + complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234, database=database_id) reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) client._datastore_api_internal = ds_api @@ -1281,80 
+1369,116 @@ def test_client_reserve_ids_w_completed_key_w_ancestor(): client.reserve_ids(complete_key, num_ids) reserved_keys = ( - _Key("PARENT", "SINGLETON", _Key.kind, id) + _Key("PARENT", "SINGLETON", _Key.kind, id, database=database_id) for id in range(complete_key.id, complete_key.id + num_ids) ) expected_keys = [key.to_protobuf() for key in reserved_keys] - reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys} - ) + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) + + reserve_ids.assert_called_once_with(request=expected_request) _assert_reserve_ids_warning(warned) -def test_client_key_w_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_key_w_project(database_id): KIND = "KIND" ID = 1234 creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(TypeError): - client.key(KIND, ID, project=PROJECT) + client.key(KIND, ID, project=PROJECT, database=database_id) -def test_client_key_wo_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_key_wo_project(database_id): kind = "KIND" id_ = 1234 + creds = _make_credentials() + client = _make_client(credentials=creds, database=database_id) + + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) + with patch as mock_klass: + key = client.key(kind, id_) + assert key is mock_klass.return_value + mock_klass.assert_called_once_with( + kind, id_, project=PROJECT, namespace=None, database=database_id + ) + + +def test_client_key_w_database(): + KIND = "KIND" + ID = 1234 + creds = _make_credentials() client = _make_client(credentials=creds) + with pytest.raises(TypeError): + client.key(KIND, ID, database="somedb") + + +def test_client_key_wo_database(): + kind = "KIND" + id_ = 1234 + database = "DATABASE" + + creds = _make_credentials() + client = _make_client(database=database, credentials=creds) + patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) with patch as mock_klass: key = client.key(kind, id_) assert key is mock_klass.return_value - mock_klass.assert_called_once_with(kind, id_, project=PROJECT, namespace=None) + mock_klass.assert_called_once_with( + kind, id_, project=PROJECT, namespace=None, database=database + ) -def test_client_key_w_namespace(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_key_w_namespace(database_id): kind = "KIND" id_ = 1234 namespace = object() creds = _make_credentials() - client = _make_client(namespace=namespace, credentials=creds) + client = _make_client(namespace=namespace, credentials=creds, database=database_id) patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) with patch as mock_klass: key = client.key(kind, id_) assert key is mock_klass.return_value mock_klass.assert_called_once_with( - kind, id_, project=PROJECT, namespace=namespace + kind, id_, project=PROJECT, namespace=namespace, database=database_id ) -def test_client_key_w_namespace_collision(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_key_w_namespace_collision(database_id): kind = "KIND" id_ = 1234 namespace1 = object() namespace2 = object() creds = _make_credentials() - client = _make_client(namespace=namespace1, credentials=creds) + client = _make_client(namespace=namespace1, credentials=creds, database=database_id) patch = 
mock.patch("google.cloud.datastore.client.Key", spec=["__call__"]) with patch as mock_klass: key = client.key(kind, id_, namespace=namespace2) assert key is mock_klass.return_value mock_klass.assert_called_once_with( - kind, id_, project=PROJECT, namespace=namespace2 + kind, id_, project=PROJECT, namespace=namespace2, database=database_id ) -def test_client_entity_w_defaults(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_entity_w_defaults(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"]) with patch as mock_klass: @@ -1424,19 +1548,21 @@ def test_client_query_w_other_client(): client.query(kind=KIND, client=other) -def test_client_query_w_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_query_w_project(database_id): KIND = "KIND" creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(TypeError): client.query(kind=KIND, project=PROJECT) -def test_client_query_w_defaults(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_query_w_defaults(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: @@ -1445,7 +1571,8 @@ def test_client_query_w_defaults(): mock_klass.assert_called_once_with(client, project=PROJECT, namespace=None) -def test_client_query_w_explicit(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_query_w_explicit(database_id): kind = "KIND" namespace = "NAMESPACE" ancestor = object() @@ -1455,7 +1582,7 @@ def test_client_query_w_explicit(): distinct_on = ["DISTINCT_ON"] creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: @@ -1482,12 +1609,13 @@ def test_client_query_w_explicit(): ) -def test_client_query_w_namespace(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_query_w_namespace(database_id): kind = "KIND" namespace = object() creds = _make_credentials() - client = _make_client(namespace=namespace, credentials=creds) + client = _make_client(namespace=namespace, credentials=creds, database=database_id) patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: @@ -1498,13 +1626,14 @@ def test_client_query_w_namespace(): ) -def test_client_query_w_namespace_collision(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_query_w_namespace_collision(database_id): kind = "KIND" namespace1 = object() namespace2 = object() creds = _make_credentials() - client = _make_client(namespace=namespace1, credentials=creds) + client = _make_client(namespace=namespace1, credentials=creds, database=database_id) patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"]) with patch as mock_klass: @@ -1515,9 +1644,10 @@ def test_client_query_w_namespace_collision(): ) -def test_client_aggregation_query_w_defaults(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def 
test_client_aggregation_query_w_defaults(database_id): creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) query = client.query() patch = mock.patch( "google.cloud.datastore.client.AggregationQuery", spec=["__call__"] @@ -1528,42 +1658,46 @@ def test_client_aggregation_query_w_defaults(): mock_klass.assert_called_once_with(client, query) -def test_client_aggregation_query_w_namespace(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_aggregation_query_w_namespace(database_id): namespace = object() creds = _make_credentials() - client = _make_client(namespace=namespace, credentials=creds) + client = _make_client(namespace=namespace, credentials=creds, database=database_id) query = client.query() aggregation_query = client.aggregation_query(query=query) assert aggregation_query.namespace == namespace -def test_client_aggregation_query_w_namespace_collision(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_aggregation_query_w_namespace_collision(database_id): namespace1 = object() namespace2 = object() creds = _make_credentials() - client = _make_client(namespace=namespace1, credentials=creds) + client = _make_client(namespace=namespace1, credentials=creds, database=database_id) query = client.query(namespace=namespace2) aggregation_query = client.aggregation_query(query=query) assert aggregation_query.namespace == namespace2 -def test_client_reserve_ids_multi_w_partial_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_multi_w_partial_key(database_id): incomplete_key = _Key(_Key.kind, None) creds = _make_credentials() - client = _make_client(credentials=creds) + client = _make_client(credentials=creds, database=database_id) with pytest.raises(ValueError): client.reserve_ids_multi([incomplete_key]) -def test_client_reserve_ids_multi(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_reserve_ids_multi(database_id): creds = _make_credentials() - client = _make_client(credentials=creds, _use_grpc=False) - key1 = _Key(_Key.kind, "one") - key2 = _Key(_Key.kind, "two") + client = _make_client(credentials=creds, _use_grpc=False, database=database_id) + key1 = _Key(_Key.kind, "one", database=database_id) + key2 = _Key(_Key.kind, "two", database=database_id) reserve_ids = mock.Mock() ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"]) client._datastore_api_internal = ds_api @@ -1571,9 +1705,9 @@ def test_client_reserve_ids_multi(): client.reserve_ids_multi([key1, key2]) expected_keys = [key1.to_protobuf(), key2.to_protobuf()] - reserve_ids.assert_called_once_with( - request={"project_id": PROJECT, "keys": expected_keys} - ) + expected_request = {"project_id": PROJECT, "keys": expected_keys} + set_database_id_to_request(expected_request, database_id) + reserve_ids.assert_called_once_with(request=expected_request) class _NoCommitBatch(object): @@ -1621,6 +1755,7 @@ class _Key(object): id = 1234 name = None _project = project = PROJECT + _database = database = None _namespace = None _key = "KEY" @@ -1745,12 +1880,13 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): +def _make_entity_pb(project, kind, integer_id, name=None, str_val=None, database=None): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.helpers import _new_value_pb 
entity_pb = entity_pb2.Entity() entity_pb.key.partition_id.project_id = project + if database is not None: + entity_pb.key.partition_id.database_id = database path_element = entity_pb._pb.key.path.add() path_element.kind = kind path_element.id = integer_id diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index cf626ee36822..467a2df18d78 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -435,12 +435,14 @@ def test_enity_to_protobf_w_dict_to_entity_recursive(): assert entity_pb == expected_pb -def _make_key_pb(project=None, namespace=None, path=()): +def _make_key_pb(project=None, namespace=None, path=(), database=None): from google.cloud.datastore_v1.types import entity as entity_pb2 pb = entity_pb2.Key() if project is not None: pb.partition_id.project_id = project + if database is not None: + pb.partition_id.database_id = database if namespace is not None: pb.partition_id.namespace_id = namespace for elem in path: @@ -453,28 +455,38 @@ def _make_key_pb(project=None, namespace=None, path=()): return pb -def test_key_from_protobuf_wo_namespace_in_pb(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_from_protobuf_wo_database_or_namespace_in_pb(database_id): from google.cloud.datastore.helpers import key_from_protobuf _PROJECT = "PROJECT" - pb = _make_key_pb(path=[{"kind": "KIND"}], project=_PROJECT) + pb = _make_key_pb(path=[{"kind": "KIND"}], project=_PROJECT, database=database_id) key = key_from_protobuf(pb) assert key.project == _PROJECT + assert key.database == database_id assert key.namespace is None -def test_key_from_protobuf_w_namespace_in_pb(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_from_protobuf_w_namespace_in_pb(database_id): from google.cloud.datastore.helpers import key_from_protobuf _PROJECT = "PROJECT" _NAMESPACE = "NAMESPACE" - pb = _make_key_pb(path=[{"kind": "KIND"}], namespace=_NAMESPACE, project=_PROJECT) + pb = _make_key_pb( + path=[{"kind": "KIND"}], + namespace=_NAMESPACE, + project=_PROJECT, + database=database_id, + ) key = key_from_protobuf(pb) assert key.project == _PROJECT + assert key.database == database_id assert key.namespace == _NAMESPACE -def test_key_from_protobuf_w_nested_path_in_pb(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_from_protobuf_w_nested_path_in_pb(database_id): from google.cloud.datastore.helpers import key_from_protobuf _PATH = [ @@ -482,9 +494,10 @@ def test_key_from_protobuf_w_nested_path_in_pb(): {"kind": "CHILD", "id": 1234}, {"kind": "GRANDCHILD", "id": 5678}, ] - pb = _make_key_pb(path=_PATH, project="PROJECT") + pb = _make_key_pb(path=_PATH, project="PROJECT", database=database_id) key = key_from_protobuf(pb) assert key.path == _PATH + assert key.database == database_id def test_w_nothing_in_pb(): diff --git a/packages/google-cloud-datastore/tests/unit/test_key.py b/packages/google-cloud-datastore/tests/unit/test_key.py index 575601f0b4f3..517013d5d782 100644 --- a/packages/google-cloud-datastore/tests/unit/test_key.py +++ b/packages/google-cloud-datastore/tests/unit/test_key.py @@ -16,7 +16,9 @@ _DEFAULT_PROJECT = "PROJECT" +_DEFAULT_DATABASE = "" PROJECT = "my-prahjekt" +DATABASE = "my-database" # NOTE: This comes directly from a running (in the dev appserver) # App Engine app. 
Created via: # @@ -64,6 +66,7 @@ def test_key_ctor_parent(): _PARENT_KIND = "KIND1" _PARENT_ID = 1234 _PARENT_PROJECT = "PROJECT-ALT" + _PARENT_DATABASE = "DATABASE-ALT" _PARENT_NAMESPACE = "NAMESPACE" _CHILD_KIND = "KIND2" _CHILD_ID = 2345 @@ -75,43 +78,73 @@ def test_key_ctor_parent(): _PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, + database=_PARENT_DATABASE, namespace=_PARENT_NAMESPACE, ) key = _make_key(_CHILD_KIND, _CHILD_ID, parent=parent_key) assert key.project == parent_key.project + assert key.database == parent_key.database assert key.namespace == parent_key.namespace assert key.kind == _CHILD_KIND assert key.path == _PATH assert key.parent is parent_key -def test_key_ctor_partial_parent(): - parent_key = _make_key("KIND", project=_DEFAULT_PROJECT) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_ctor_partial_parent(database_id): + parent_key = _make_key("KIND", project=_DEFAULT_PROJECT, database=database_id) with pytest.raises(ValueError): - _make_key("KIND2", 1234, parent=parent_key) + _make_key("KIND2", 1234, parent=parent_key, database=database_id) -def test_key_ctor_parent_bad_type(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_ctor_parent_bad_type(database_id): with pytest.raises(AttributeError): - _make_key("KIND2", 1234, parent=("KIND1", 1234), project=_DEFAULT_PROJECT) + _make_key( + "KIND2", + 1234, + parent=("KIND1", 1234), + project=_DEFAULT_PROJECT, + database=database_id, + ) -def test_key_ctor_parent_bad_namespace(): - parent_key = _make_key("KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT) - with pytest.raises(ValueError): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_ctor_parent_bad_namespace(database_id): + parent_key = _make_key( + "KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT, database=database_id + ) + with pytest.raises(ValueError) as exc: _make_key( "KIND2", 1234, namespace="BAR", parent=parent_key, PROJECT=_DEFAULT_PROJECT, + database=database_id, ) + assert "Child namespace must agree with parent's." in str(exc.value) -def test_key_ctor_parent_bad_project(): - parent_key = _make_key("KIND", 1234, project="FOO") - with pytest.raises(ValueError): - _make_key("KIND2", 1234, parent=parent_key, project="BAR") +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_ctor_parent_bad_project(database_id): + parent_key = _make_key("KIND", 1234, project="FOO", database=database_id) + with pytest.raises(ValueError) as exc: + _make_key("KIND2", 1234, parent=parent_key, project="BAR", database=database_id) + assert "Child project must agree with parent's." 
in str(exc.value) + + +def test_key_ctor_parent_bad_database(): + parent_key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT, database="db1") + with pytest.raises(ValueError) as exc: + _make_key( + "KIND2", + 1234, + parent=parent_key, + PROJECT=_DEFAULT_PROJECT, + database="db2", + ) + assert "Child database must agree with parent's" in str(exc.value) def test_key_ctor_parent_empty_path(): @@ -122,12 +155,33 @@ def test_key_ctor_parent_empty_path(): def test_key_ctor_explicit(): _PROJECT = "PROJECT-ALT" + _DATABASE = "DATABASE-ALT" _NAMESPACE = "NAMESPACE" _KIND = "KIND" _ID = 1234 _PATH = [{"kind": _KIND, "id": _ID}] - key = _make_key(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) + key = _make_key( + _KIND, _ID, namespace=_NAMESPACE, database=_DATABASE, project=_PROJECT + ) + assert key.project == _PROJECT + assert key.database == _DATABASE + assert key.namespace == _NAMESPACE + assert key.kind == _KIND + assert key.path == _PATH + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_ctor_explicit_w_unspecified_database(database_id): + _PROJECT = "PROJECT-ALT" + _NAMESPACE = "NAMESPACE" + _KIND = "KIND" + _ID = 1234 + _PATH = [{"kind": _KIND, "id": _ID}] + key = _make_key( + _KIND, _ID, namespace=_NAMESPACE, project=_PROJECT, database=database_id + ) assert key.project == _PROJECT + assert key.database == database_id assert key.namespace == _NAMESPACE assert key.kind == _KIND assert key.path == _PATH @@ -151,21 +205,26 @@ def test_key_ctor_bad_id_or_name(): def test_key__clone(): _PROJECT = "PROJECT-ALT" + _DATABASE = "DATABASE-ALT" _NAMESPACE = "NAMESPACE" _KIND = "KIND" _ID = 1234 _PATH = [{"kind": _KIND, "id": _ID}] - key = _make_key(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT) + key = _make_key( + _KIND, _ID, namespace=_NAMESPACE, database=_DATABASE, project=_PROJECT + ) clone = key._clone() assert clone.project == _PROJECT + assert clone.database == _DATABASE assert clone.namespace == _NAMESPACE assert clone.kind == _KIND assert clone.path == _PATH -def test_key__clone_with_parent(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key__clone_with_parent(database_id): _PROJECT = "PROJECT-ALT" _NAMESPACE = "NAMESPACE" _KIND1 = "PARENT" @@ -174,174 +233,246 @@ def test_key__clone_with_parent(): _ID2 = 2345 _PATH = [{"kind": _KIND1, "id": _ID1}, {"kind": _KIND2, "id": _ID2}] - parent = _make_key(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT) - key = _make_key(_KIND2, _ID2, parent=parent) + parent = _make_key( + _KIND1, _ID1, namespace=_NAMESPACE, database=database_id, project=_PROJECT + ) + key = _make_key(_KIND2, _ID2, parent=parent, database=database_id) assert key.parent is parent clone = key._clone() assert clone.parent is key.parent assert clone.project == _PROJECT + assert clone.database == database_id assert clone.namespace == _NAMESPACE assert clone.path == _PATH -def test_key___eq_____ne___w_non_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___w_non_key(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _NAME = "one" - key = _make_key(_KIND, _NAME, project=_PROJECT) + key = _make_key(_KIND, _NAME, project=_PROJECT, database=database_id) assert not key == object() assert key != object() -def test_key___eq_____ne___two_incomplete_keys_same_kind(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___two_incomplete_keys_same_kind(database_id): _PROJECT = "PROJECT" _KIND = "KIND" - key1 = _make_key(_KIND, project=_PROJECT) - key2 = 
_make_key(_KIND, project=_PROJECT) + key1 = _make_key(_KIND, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, project=_PROJECT, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___incomplete_key_w_complete_key_same_kind(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___incomplete_key_w_complete_key_same_kind(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _ID = 1234 - key1 = _make_key(_KIND, project=_PROJECT) - key2 = _make_key(_KIND, _ID, project=_PROJECT) + key1 = _make_key(_KIND, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, _ID, project=_PROJECT, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___complete_key_w_incomplete_key_same_kind(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___complete_key_w_incomplete_key_same_kind(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _ID = 1234 - key1 = _make_key(_KIND, _ID, project=_PROJECT) - key2 = _make_key(_KIND, project=_PROJECT) + key1 = _make_key(_KIND, _ID, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, project=_PROJECT, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___same_kind_different_ids(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_different_ids(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _ID1 = 1234 _ID2 = 2345 - key1 = _make_key(_KIND, _ID1, project=_PROJECT) - key2 = _make_key(_KIND, _ID2, project=_PROJECT) + key1 = _make_key(_KIND, _ID1, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, _ID2, project=_PROJECT, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___same_kind_and_id(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_id(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _ID = 1234 - key1 = _make_key(_KIND, _ID, project=_PROJECT) - key2 = _make_key(_KIND, _ID, project=_PROJECT) + key1 = _make_key(_KIND, _ID, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, _ID, project=_PROJECT, database=database_id) assert key1 == key2 assert not key1 != key2 -def test_key___eq_____ne___same_kind_and_id_different_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_id_different_project(database_id): _PROJECT1 = "PROJECT1" _PROJECT2 = "PROJECT2" _KIND = "KIND" _ID = 1234 - key1 = _make_key(_KIND, _ID, project=_PROJECT1) - key2 = _make_key(_KIND, _ID, project=_PROJECT2) + key1 = _make_key(_KIND, _ID, project=_PROJECT1, database=database_id) + key2 = _make_key(_KIND, _ID, project=_PROJECT2, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___same_kind_and_id_different_namespace(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_id_different_database(database_id): + _PROJECT = "PROJECT" + _DATABASE1 = "DATABASE1" + _DATABASE2 = "DATABASE2" + _KIND = "KIND" + _ID = 1234 + key1 = _make_key(_KIND, _ID, project=_PROJECT, database=_DATABASE1) + key2 = _make_key(_KIND, _ID, project=_PROJECT, database=_DATABASE2) + key_with_explicit_default = _make_key( + _KIND, _ID, project=_PROJECT, database=database_id + ) + key_with_implicit_default = _make_key( + _KIND, _ID, project=_PROJECT, database=database_id + ) + assert not key1 == key2 + assert key1 != 
key2 + assert not key1 == key_with_explicit_default + assert key1 != key_with_explicit_default + assert not key1 == key_with_implicit_default + assert key1 != key_with_implicit_default + assert key_with_explicit_default == key_with_implicit_default + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_id_different_namespace(database_id): _PROJECT = "PROJECT" _NAMESPACE1 = "NAMESPACE1" _NAMESPACE2 = "NAMESPACE2" _KIND = "KIND" _ID = 1234 - key1 = _make_key(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1) - key2 = _make_key(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2) + key1 = _make_key( + _KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1, database=database_id + ) + key2 = _make_key( + _KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2, database=database_id + ) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___same_kind_different_names(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_different_names(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _NAME1 = "one" _NAME2 = "two" - key1 = _make_key(_KIND, _NAME1, project=_PROJECT) - key2 = _make_key(_KIND, _NAME2, project=_PROJECT) + key1 = _make_key(_KIND, _NAME1, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, _NAME2, project=_PROJECT, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___same_kind_and_name(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_name(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _NAME = "one" - key1 = _make_key(_KIND, _NAME, project=_PROJECT) - key2 = _make_key(_KIND, _NAME, project=_PROJECT) + key1 = _make_key(_KIND, _NAME, project=_PROJECT, database=database_id) + key2 = _make_key(_KIND, _NAME, project=_PROJECT, database=database_id) assert key1 == key2 assert not key1 != key2 -def test_key___eq_____ne___same_kind_and_name_different_project(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_name_different_project(database_id): _PROJECT1 = "PROJECT1" _PROJECT2 = "PROJECT2" _KIND = "KIND" _NAME = "one" - key1 = _make_key(_KIND, _NAME, project=_PROJECT1) - key2 = _make_key(_KIND, _NAME, project=_PROJECT2) + key1 = _make_key(_KIND, _NAME, project=_PROJECT1, database=database_id) + key2 = _make_key(_KIND, _NAME, project=_PROJECT2, database=database_id) assert not key1 == key2 assert key1 != key2 -def test_key___eq_____ne___same_kind_and_name_different_namespace(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___eq_____ne___same_kind_and_name_different_namespace(database_id): _PROJECT = "PROJECT" _NAMESPACE1 = "NAMESPACE1" _NAMESPACE2 = "NAMESPACE2" _KIND = "KIND" _NAME = "one" - key1 = _make_key(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1) - key2 = _make_key(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2) + key1 = _make_key( + _KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1, database=database_id + ) + key2 = _make_key( + _KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2, database=database_id + ) assert not key1 == key2 assert key1 != key2 -def test_key___hash___incomplete(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___hash___incomplete(database_id): _PROJECT = "PROJECT" _KIND = "KIND" - key = _make_key(_KIND, project=_PROJECT) - assert hash(key) != hash(_KIND) + hash(_PROJECT) + hash(None) + key = _make_key(_KIND, project=_PROJECT, 
database=database_id) + assert hash(key) != hash(_KIND) + hash(_PROJECT) + hash(None) + hash(None) + hash( + database_id + ) -def test_key___hash___completed_w_id(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___hash___completed_w_id(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _ID = 1234 - key = _make_key(_KIND, _ID, project=_PROJECT) - assert hash(key) != hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None) + key = _make_key(_KIND, _ID, project=_PROJECT, database=database_id) + assert hash(key) != hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None) + hash( + None + ) + hash(database_id) -def test_key___hash___completed_w_name(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key___hash___completed_w_name(database_id): _PROJECT = "PROJECT" _KIND = "KIND" _NAME = "NAME" - key = _make_key(_KIND, _NAME, project=_PROJECT) - assert hash(key) != hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None) + key = _make_key(_KIND, _NAME, project=_PROJECT, database=database_id) + assert hash(key) != hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None) + hash( + None + ) + hash(database_id) -def test_key_completed_key_on_partial_w_id(): - key = _make_key("KIND", project=_DEFAULT_PROJECT) +def test_key___hash___completed_w_database_and_namespace(): + _PROJECT = "PROJECT" + _DATABASE = "DATABASE" + _NAMESPACE = "NAMESPACE" + _KIND = "KIND" + _NAME = "NAME" + key = _make_key( + _KIND, _NAME, project=_PROJECT, database=_DATABASE, namespace=_NAMESPACE + ) + assert hash(key) != hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None) + hash( + None + ) + hash(None) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_key_completed_key_on_partial_w_id(database_id): + key = _make_key("KIND", project=_DEFAULT_PROJECT, database=database_id) _ID = 1234 new_key = key.completed_key(_ID) assert key is not new_key assert new_key.id == _ID assert new_key.name is None + assert new_key.database == database_id def test_key_completed_key_on_partial_w_name(): @@ -376,6 +507,7 @@ def test_key_to_protobuf_defaults(): # Check partition ID. assert pb.partition_id.project_id == _DEFAULT_PROJECT # Unset values are False-y. + assert pb.partition_id.database_id == _DEFAULT_DATABASE assert pb.partition_id.namespace_id == "" # Check the element PB matches the partial key and kind. 
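The hash tests above compare against a naive sum of the key's parts, now padded with extra hash(None) terms and a hash(database_id) term; together with the __eq__ cases they pin down which fields a database-aware Key folds into its identity. A sketch of the contract those assertions imply (an illustration of the contract, not the library's verbatim implementation):

    class Key:
        # Illustrative fragment: identity covers path, project,
        # database, and namespace; "database" is the newly added field.
        def __eq__(self, other):
            if not isinstance(other, Key):
                return NotImplemented
            if self.is_partial or other.is_partial:
                # Incomplete keys never compare equal -- see
                # test_key___eq_____ne___two_incomplete_keys_same_kind.
                return False
            return (
                self.flat_path == other.flat_path
                and self.project == other.project
                and self.database == other.database
                and self.namespace == other.namespace
            )

        def __hash__(self):
            # hash(flat_path) hashes a tuple, so the result does not
            # coincide with the flat sums the tests assert against.
            return (
                hash(self.flat_path)
                + hash(self.project)
                + hash(self.database)
                + hash(self.namespace)
            )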
@@ -394,6 +526,13 @@ def test_key_to_protobuf_w_explicit_project(): assert pb.partition_id.project_id == _PROJECT +def test_key_to_protobuf_w_explicit_database(): + _DATABASE = "DATABASE-ALT" + key = _make_key("KIND", project=_DEFAULT_PROJECT, database=_DATABASE) + pb = key.to_protobuf() + assert pb.partition_id.database_id == _DATABASE + + def test_key_to_protobuf_w_explicit_namespace(): _NAMESPACE = "NAMESPACE" key = _make_key("KIND", namespace=_NAMESPACE, project=_DEFAULT_PROJECT) @@ -450,12 +589,26 @@ def test_key_to_legacy_urlsafe_with_location_prefix(): assert urlsafe == _URLSAFE_EXAMPLE3 +def test_key_to_legacy_urlsafe_w_nondefault_database(): + _KIND = "KIND" + _ID = 1234 + _PROJECT = "PROJECT-ALT" + _DATABASE = "DATABASE-ALT" + key = _make_key(_KIND, _ID, project=_PROJECT, database=_DATABASE) + + with pytest.raises( + ValueError, match="to_legacy_urlsafe only supports the default database" + ): + key.to_legacy_urlsafe() + + def test_key_from_legacy_urlsafe(): from google.cloud.datastore.key import Key key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE1) assert "s~" + key.project == _URLSAFE_APP1 + assert key.database is None assert key.namespace == _URLSAFE_NAMESPACE1 assert key.flat_path == _URLSAFE_FLAT_PATH1 # Also make sure we didn't accidentally set the parent. diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index f94a98984f89..25b3febb635c 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -25,13 +25,17 @@ BaseCompositeFilter, ) +from google.cloud.datastore.helpers import set_database_id_to_request + _PROJECT = "PROJECT" -def test_query_ctor_defaults(): - client = _make_client() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ctor_defaults(database_id): + client = _make_client(database=database_id) query = _make_query(client) assert query._client is client + assert query._client.database == client.database assert query.project == client.project assert query.kind is None assert query.namespace == client.namespace @@ -51,14 +55,15 @@ def test_query_ctor_defaults(): [Or([PropertyFilter("foo", "=", "Qux"), PropertyFilter("bar", "<", 17)])], ], ) -def test_query_ctor_explicit(filters): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ctor_explicit(filters, database_id): from google.cloud.datastore.key import Key _PROJECT = "OTHER_PROJECT" _KIND = "KIND" _NAMESPACE = "OTHER_NAMESPACE" - client = _make_client() - ancestor = Key("ANCESTOR", 123, project=_PROJECT) + client = _make_client(database=database_id) + ancestor = Key("ANCESTOR", 123, project=_PROJECT, database=database_id) FILTERS = filters PROJECTION = ["foo", "bar", "baz"] ORDER = ["foo", "bar"] @@ -76,6 +81,7 @@ def test_query_ctor_explicit(filters): distinct_on=DISTINCT_ON, ) assert query._client is client + assert query._client.database == database_id assert query.project == _PROJECT assert query.kind == _KIND assert query.namespace == _NAMESPACE @@ -86,68 +92,91 @@ def test_query_ctor_explicit(filters): assert query.distinct_on == DISTINCT_ON -def test_query_ctor_bad_projection(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ctor_bad_projection(database_id): BAD_PROJECTION = object() with pytest.raises(TypeError): - _make_query(_make_client(), projection=BAD_PROJECTION) + _make_query(_make_client(database=database_id), projection=BAD_PROJECTION) -def test_query_ctor_bad_order(): 
+@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ctor_bad_order(database_id): BAD_ORDER = object() with pytest.raises(TypeError): - _make_query(_make_client(), order=BAD_ORDER) + _make_query(_make_client(database=database_id), order=BAD_ORDER) -def test_query_ctor_bad_distinct_on(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ctor_bad_distinct_on(database_id): BAD_DISTINCT_ON = object() with pytest.raises(TypeError): - _make_query(_make_client(), distinct_on=BAD_DISTINCT_ON) + _make_query(_make_client(database=database_id), distinct_on=BAD_DISTINCT_ON) -def test_query_ctor_bad_filters(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ctor_bad_filters(database_id): FILTERS_CANT_UNPACK = [("one", "two")] with pytest.raises(ValueError): - _make_query(_make_client(), filters=FILTERS_CANT_UNPACK) + _make_query(_make_client(database=database_id), filters=FILTERS_CANT_UNPACK) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_project_getter(database_id): + PROJECT = "PROJECT" + query = _make_query(_make_client(database=database_id), project=PROJECT) + assert query.project == PROJECT + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_database_getter(database_id): + query = _make_query(_make_client(database=database_id)) + assert query._client.database == database_id -def test_query_namespace_setter_w_non_string(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_namespace_setter_w_non_string(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError): query.namespace = object() -def test_query_namespace_setter(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_namespace_setter(database_id): _NAMESPACE = "OTHER_NAMESPACE" - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) query.namespace = _NAMESPACE assert query.namespace == _NAMESPACE -def test_query_kind_setter_w_non_string(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_kind_setter_w_non_string(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(TypeError): query.kind = object() -def test_query_kind_setter_wo_existing(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_kind_setter_wo_existing(database_id): _KIND = "KIND" - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) query.kind = _KIND assert query.kind == _KIND -def test_query_kind_setter_w_existing(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_kind_setter_w_existing(database_id): _KIND_BEFORE = "KIND_BEFORE" _KIND_AFTER = "KIND_AFTER" - query = _make_query(_make_client(), kind=_KIND_BEFORE) + query = _make_query(_make_client(database=database_id), kind=_KIND_BEFORE) assert query.kind == _KIND_BEFORE query.kind = _KIND_AFTER assert query.project == _PROJECT assert query.kind == _KIND_AFTER -def test_query_ancestor_setter_w_non_key(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ancestor_setter_w_non_key(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(TypeError): query.ancestor = object() @@ -156,68 +185,76 @@ def test_query_ancestor_setter_w_non_key(): 
query.ancestor = ["KIND", "NAME"] -def test_query_ancestor_setter_w_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ancestor_setter_w_key(database_id): from google.cloud.datastore.key import Key _NAME = "NAME" - key = Key("KIND", 123, project=_PROJECT) - query = _make_query(_make_client()) + key = Key("KIND", 123, project=_PROJECT, database=database_id) + query = _make_query(_make_client(database=database_id)) query.add_filter("name", "=", _NAME) query.ancestor = key assert query.ancestor.path == key.path -def test_query_ancestor_setter_w_key_property_filter(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ancestor_setter_w_key_property_filter(database_id): from google.cloud.datastore.key import Key _NAME = "NAME" - key = Key("KIND", 123, project=_PROJECT) - query = _make_query(_make_client()) + key = Key("KIND", 123, project=_PROJECT, database=database_id) + query = _make_query(_make_client(database=database_id)) query.add_filter(filter=PropertyFilter("name", "=", _NAME)) query.ancestor = key assert query.ancestor.path == key.path -def test_query_ancestor_deleter_w_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_ancestor_deleter_w_key(database_id): from google.cloud.datastore.key import Key - key = Key("KIND", 123, project=_PROJECT) - query = _make_query(client=_make_client(), ancestor=key) + key = Key("KIND", 123, project=_PROJECT, database=database_id) + query = _make_query(client=_make_client(database=database_id), ancestor=key) del query.ancestor assert query.ancestor is None -def test_query_add_filter_setter_w_unknown_operator(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_setter_w_unknown_operator(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError) as exc: query.add_filter("firstname", "~~", "John") assert "Invalid expression:" in str(exc.value) assert "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." in str(exc.value) -def test_query_add_property_filter_setter_w_unknown_operator(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter_setter_w_unknown_operator(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError) as exc: query.add_filter(filter=PropertyFilter("firstname", "~~", "John")) assert "Invalid expression:" in str(exc.value) assert "Please use one of: =, <, <=, >, >=, !=, IN, NOT_IN." 
in str(exc.value) -def test_query_add_filter_w_known_operator(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_w_known_operator(database_id): + query = _make_query(_make_client(database=database_id)) query.add_filter("firstname", "=", "John") assert query.filters == [("firstname", "=", "John")] -def test_query_add_property_filter_w_known_operator(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter_w_known_operator(database_id): + query = _make_query(_make_client(database=database_id)) property_filter = PropertyFilter("firstname", "=", "John") query.add_filter(filter=property_filter) assert query.filters == [property_filter] -def test_query_add_filter_w_all_operators(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_w_all_operators(database_id): + query = _make_query(_make_client(database=database_id)) query.add_filter("leq_prop", "<=", "val1") query.add_filter("geq_prop", ">=", "val2") query.add_filter("lt_prop", "<", "val3") @@ -237,8 +274,9 @@ def test_query_add_filter_w_all_operators(): assert query.filters[7] == ("not_in_prop", "NOT_IN", ["val13"]) -def test_query_add_property_filter_w_all_operators(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter_w_all_operators(database_id): + query = _make_query(_make_client(database=database_id)) filters = [ ("leq_prop", "<=", "val1"), ("geq_prop", ">=", "val2"), @@ -260,10 +298,11 @@ def test_query_add_property_filter_w_all_operators(): assert query.filters[i] == property_filters[i] -def test_query_add_filter_w_known_operator_and_entity(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_w_known_operator_and_entity(database_id): from google.cloud.datastore.entity import Entity - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) other = Entity() other["firstname"] = "John" other["lastname"] = "Smith" @@ -271,10 +310,11 @@ def test_query_add_filter_w_known_operator_and_entity(): assert query.filters == [("other", "=", other)] -def test_query_add_property_filter_w_known_operator_and_entity(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter_w_known_operator_and_entity(database_id): from google.cloud.datastore.entity import Entity - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) other = Entity() other["firstname"] = "John" other["lastname"] = "Smith" @@ -283,52 +323,58 @@ def test_query_add_property_filter_w_known_operator_and_entity(): assert query.filters == [property_filter] -def test_query_add_filter_w_whitespace_property_name(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_w_whitespace_property_name(database_id): + query = _make_query(_make_client(database=database_id)) PROPERTY_NAME = " property with lots of space " query.add_filter(PROPERTY_NAME, "=", "John") assert query.filters == [(PROPERTY_NAME, "=", "John")] -def test_query_add_property_filter_w_whitespace_property_name(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter_w_whitespace_property_name(database_id): + query = 
_make_query(_make_client(database=database_id)) PROPERTY_NAME = " property with lots of space " property_filter = PropertyFilter(PROPERTY_NAME, "=", "John") query.add_filter(filter=property_filter) assert query.filters == [property_filter] -def test_query_add_filter___key__valid_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter___key__valid_key(database_id): from google.cloud.datastore.key import Key - query = _make_query(_make_client()) - key = Key("Foo", project=_PROJECT) + query = _make_query(_make_client(database=database_id)) + key = Key("Foo", project=_PROJECT, database=database_id) query.add_filter("__key__", "=", key) assert query.filters == [("__key__", "=", key)] -def test_query_add_property_filter___key__valid_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter___key__valid_key(database_id): from google.cloud.datastore.key import Key - query = _make_query(_make_client()) - key = Key("Foo", project=_PROJECT) + query = _make_query(_make_client(database=database_id)) + key = Key("Foo", project=_PROJECT, database=database_id) property_filter = PropertyFilter("__key__", "=", key) query.add_filter(filter=property_filter) assert query.filters == [property_filter] -def test_query_add_filter_return_query_obj(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_return_query_obj(database_id): from google.cloud.datastore.query import Query - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) query_obj = query.add_filter("firstname", "=", "John") assert isinstance(query_obj, Query) assert query_obj.filters == [("firstname", "=", "John")] -def test_query_add_property_filter_without_keyword_argument(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_property_filter_without_keyword_argument(database_id): - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) property_filter = PropertyFilter("firstname", "=", "John") with pytest.raises(ValueError) as exc: query.add_filter(property_filter) @@ -339,9 +385,10 @@ def test_query_add_property_filter_without_keyword_argument(): ) -def test_query_add_composite_filter_without_keyword_argument(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_composite_filter_without_keyword_argument(database_id): - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) and_filter = And(["firstname", "=", "John"]) with pytest.raises(ValueError) as exc: query.add_filter(and_filter) @@ -361,9 +408,10 @@ def test_query_add_composite_filter_without_keyword_argument(): ) -def test_query_positional_args_and_property_filter(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_positional_args_and_property_filter(database_id): - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError) as exc: query.add_filter("firstname", "=", "John", filter=("name", "=", "Blabla")) @@ -373,9 +421,10 @@ def test_query_positional_args_and_property_filter(): ) -def test_query_positional_args_and_composite_filter(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_positional_args_and_composite_filter(database_id): - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) and_filter = And(["firstname", "=", "John"]) with 
pytest.raises(ValueError) as exc: query.add_filter("firstname", "=", "John", filter=and_filter) @@ -386,8 +435,9 @@ def test_query_positional_args_and_composite_filter(): ) -def test_query_add_filter_with_positional_args_raises_user_warning(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_add_filter_with_positional_args_raises_user_warning(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.warns( UserWarning, match="Detected filter using positional arguments", @@ -401,151 +451,171 @@ def test_query_add_filter_with_positional_args_raises_user_warning(): _make_stub_query(filters=[("name", "=", "John")]) -def test_query_filter___key__not_equal_operator(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_filter___key__not_equal_operator(database_id): from google.cloud.datastore.key import Key - key = Key("Foo", project=_PROJECT) - query = _make_query(_make_client()) + key = Key("Foo", project=_PROJECT, database=database_id) + query = _make_query(_make_client(database=database_id)) query.add_filter("__key__", "<", key) assert query.filters == [("__key__", "<", key)] -def test_query_property_filter___key__not_equal_operator(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_property_filter___key__not_equal_operator(database_id): from google.cloud.datastore.key import Key - key = Key("Foo", project=_PROJECT) - query = _make_query(_make_client()) + key = Key("Foo", project=_PROJECT, database=database_id) + query = _make_query(_make_client(database=database_id)) property_filter = PropertyFilter("__key__", "<", key) query.add_filter(filter=property_filter) assert query.filters == [property_filter] -def test_query_filter___key__invalid_value(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_filter___key__invalid_value(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError) as exc: query.add_filter("__key__", "=", None) assert "Invalid key:" in str(exc.value) -def test_query_property_filter___key__invalid_value(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_property_filter___key__invalid_value(database_id): + query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError) as exc: query.add_filter(filter=PropertyFilter("__key__", "=", None)) assert "Invalid key:" in str(exc.value) -def test_query_projection_setter_empty(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_projection_setter_empty(database_id): + query = _make_query(_make_client(database=database_id)) query.projection = [] assert query.projection == [] -def test_query_projection_setter_string(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_projection_setter_string(database_id): + query = _make_query(_make_client(database=database_id)) query.projection = "field1" assert query.projection == ["field1"] -def test_query_projection_setter_non_empty(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_projection_setter_non_empty(database_id): + query = _make_query(_make_client(database=database_id)) query.projection = ["field1", "field2"] assert query.projection == ["field1", "field2"] -def 
test_query_projection_setter_multiple_calls(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_projection_setter_multiple_calls(database_id): _PROJECTION1 = ["field1", "field2"] _PROJECTION2 = ["field3"] - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) query.projection = _PROJECTION1 assert query.projection == _PROJECTION1 query.projection = _PROJECTION2 assert query.projection == _PROJECTION2 -def test_query_keys_only(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_keys_only(database_id): + query = _make_query(_make_client(database=database_id)) query.keys_only() assert query.projection == ["__key__"] -def test_query_key_filter_defaults(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_key_filter_defaults(database_id): from google.cloud.datastore.key import Key - client = _make_client() + client = _make_client(database=database_id) query = _make_query(client) assert query.filters == [] - key = Key("Kind", 1234, project="project") + key = Key("Kind", 1234, project="project", database=database_id) query.key_filter(key) assert query.filters == [("__key__", "=", key)] -def test_query_key_filter_explicit(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_key_filter_explicit(database_id): from google.cloud.datastore.key import Key - client = _make_client() + client = _make_client(database=database_id) query = _make_query(client) assert query.filters == [] - key = Key("Kind", 1234, project="project") + key = Key("Kind", 1234, project="project", database=database_id) query.key_filter(key, operator=">") assert query.filters == [("__key__", ">", key)] -def test_query_order_setter_empty(): - query = _make_query(_make_client(), order=["foo", "-bar"]) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_order_setter_empty(database_id): + query = _make_query(_make_client(database=database_id), order=["foo", "-bar"]) query.order = [] assert query.order == [] -def test_query_order_setter_string(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_order_setter_string(database_id): + query = _make_query(_make_client(database=database_id)) query.order = "field" assert query.order == ["field"] -def test_query_order_setter_single_item_list_desc(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_order_setter_single_item_list_desc(database_id): + query = _make_query(_make_client(database=database_id)) query.order = ["-field"] assert query.order == ["-field"] -def test_query_order_setter_multiple(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_order_setter_multiple(database_id): + query = _make_query(_make_client(database=database_id)) query.order = ["foo", "-bar"] assert query.order == ["foo", "-bar"] -def test_query_distinct_on_setter_empty(): - query = _make_query(_make_client(), distinct_on=["foo", "bar"]) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_distinct_on_setter_empty(database_id): + query = _make_query(_make_client(database=database_id), distinct_on=["foo", "bar"]) query.distinct_on = [] assert query.distinct_on == [] -def test_query_distinct_on_setter_string(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def 
test_query_distinct_on_setter_string(database_id): + query = _make_query(_make_client(database=database_id)) query.distinct_on = "field1" assert query.distinct_on == ["field1"] -def test_query_distinct_on_setter_non_empty(): - query = _make_query(_make_client()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_distinct_on_setter_non_empty(database_id): + query = _make_query(_make_client(database=database_id)) query.distinct_on = ["field1", "field2"] assert query.distinct_on == ["field1", "field2"] -def test_query_distinct_on_multiple_calls(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_distinct_on_multiple_calls(database_id): _DISTINCT_ON1 = ["field1", "field2"] _DISTINCT_ON2 = ["field3"] - query = _make_query(_make_client()) + query = _make_query(_make_client(database=database_id)) query.distinct_on = _DISTINCT_ON1 assert query.distinct_on == _DISTINCT_ON1 query.distinct_on = _DISTINCT_ON2 assert query.distinct_on == _DISTINCT_ON2 -def test_query_fetch_defaults_w_client_attr(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_fetch_defaults_w_client_attr(database_id): from google.cloud.datastore.query import Iterator - client = _make_client() + client = _make_client(database=database_id) query = _make_query(client) iterator = query.fetch() @@ -559,11 +629,12 @@ def test_query_fetch_defaults_w_client_attr(): assert iterator._timeout is None -def test_query_fetch_w_explicit_client_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_fetch_w_explicit_client_w_retry_w_timeout(database_id): from google.cloud.datastore.query import Iterator - client = _make_client() - other_client = _make_client() + client = _make_client(database=database_id) + other_client = _make_client(database=database_id) query = _make_query(client) retry = mock.Mock() timeout = 100000 @@ -697,13 +768,14 @@ def test_iterator__build_protobuf_all_values_except_start_and_end_cursor(): assert pb == expected_pb -def test_iterator__process_query_results(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator__process_query_results(database_id): from google.cloud.datastore_v1.types import query as query_pb2 iterator = _make_iterator(None, None, end_cursor="abcd") assert iterator._end_cursor is not None - entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT")] + entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT", database=database_id)] cursor_as_bytes = b"\x9ai\xe7" cursor = b"mmnn" skipped_results = 4 @@ -719,13 +791,14 @@ def test_iterator__process_query_results(): assert iterator._more_results -def test_iterator__process_query_results_done(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator__process_query_results_done(database_id): from google.cloud.datastore_v1.types import query as query_pb2 iterator = _make_iterator(None, None, end_cursor="abcd") assert iterator._end_cursor is not None - entity_pbs = [_make_entity("World", 1234, "PROJECT")] + entity_pbs = [_make_entity("World", 1234, "PROJECT", database=database_id)] cursor_as_bytes = b"\x9ai\xe7" skipped_results = 44 more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS @@ -749,7 +822,9 @@ def test_iterator__process_query_results_bad_enum(): iterator._process_query_results(response_pb) -def _next_page_helper(txn_id=None, retry=None, timeout=None, read_time=None): +def _next_page_helper( + txn_id=None, retry=None, timeout=None, read_time=None, database=None +): from 
google.api_core import page_iterator from google.cloud.datastore.query import Query from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -762,10 +837,12 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None, read_time=None): project = "prujekt" ds_api = _make_datastore_api(result) if txn_id is None: - client = _Client(project, datastore_api=ds_api) + client = _Client(project, database=database, datastore_api=ds_api) else: transaction = mock.Mock(id=txn_id, spec=["id"]) - client = _Client(project, datastore_api=ds_api, transaction=transaction) + client = _Client( + project, database=database, datastore_api=ds_api, transaction=transaction + ) query = Query(client) kwargs = {} @@ -787,7 +864,7 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None, read_time=None): assert isinstance(page, page_iterator.Page) assert page._parent is iterator - partition_id = entity_pb2.PartitionId(project_id=project) + partition_id = entity_pb2.PartitionId(project_id=project, database_id=database) if txn_id is not None: read_options = datastore_pb2.ReadOptions(transaction=txn_id) elif read_time is not None: @@ -797,40 +874,48 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None, read_time=None): else: read_options = datastore_pb2.ReadOptions() empty_query = query_pb2.Query() + expected_request = { + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "query": empty_query, + } + set_database_id_to_request(expected_request, database) ds_api.run_query.assert_called_once_with( - request={ - "project_id": project, - "partition_id": partition_id, - "read_options": read_options, - "query": empty_query, - }, + request=expected_request, **kwargs, ) -def test_iterator__next_page(): - _next_page_helper() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator__next_page(database_id): + _next_page_helper(database=database_id) -def test_iterator__next_page_w_retry(): - _next_page_helper(retry=mock.Mock()) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator__next_page_w_retry(database_id): + _next_page_helper(retry=mock.Mock(), database=database_id) -def test_iterator__next_page_w_timeout(): - _next_page_helper(timeout=100000) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator__next_page_in_transaction(database_id): - _next_page_helper(timeout=100000) hold
import page_iterator from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -865,7 +951,7 @@ def test_iterator__next_page_w_skipped_lt_offset(): result_2.batch.skipped_cursor = skipped_cursor_2 ds_api = _make_datastore_api(result_1, result_2) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) query = Query(client) offset = 150 @@ -876,24 +962,24 @@ def test_iterator__next_page_w_skipped_lt_offset(): assert isinstance(page, page_iterator.Page) assert page._parent is iterator - partition_id = entity_pb2.PartitionId(project_id=project) + partition_id = entity_pb2.PartitionId(project_id=project, database_id=database_id) read_options = datastore_pb2.ReadOptions() query_1 = query_pb2.Query(offset=offset) query_2 = query_pb2.Query( start_cursor=skipped_cursor_1, offset=(offset - skipped_1) ) - expected_calls = [ - mock.call( - request={ - "project_id": project, - "partition_id": partition_id, - "read_options": read_options, - "query": query, - } - ) - for query in [query_1, query_2] - ] + expected_calls = [] + for query in [query_1, query_2]: + expected_request = { + "project_id": project, + "partition_id": partition_id, + "read_options": read_options, + "query": query, + } + set_database_id_to_request(expected_request, database_id) + expected_calls.append(mock.call(request=expected_request)) + assert ds_api.run_query.call_args_list == expected_calls @@ -943,12 +1029,13 @@ def test_pb_from_query_kind(): assert [item.name for item in pb.kind] == ["KIND"] -def test_pb_from_query_ancestor(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_pb_from_query_ancestor(database_id): from google.cloud.datastore.key import Key from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.query import _pb_from_query - ancestor = Key("Ancestor", 123, project="PROJECT") + ancestor = Key("Ancestor", 123, project="PROJECT", database=database_id) pb = _pb_from_query(_make_stub_query(ancestor=ancestor)) cfilter = pb.filter.composite_filter assert cfilter.op == query_pb2.CompositeFilter.Operator.AND @@ -974,12 +1061,13 @@ def test_pb_from_query_filter(): assert pfilter.value.string_value == "John" -def test_pb_from_query_filter_key(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_pb_from_query_filter_key(database_id): from google.cloud.datastore.key import Key from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.query import _pb_from_query - key = Key("Kind", 123, project="PROJECT") + key = Key("Kind", 123, project="PROJECT", database=database_id) query = _make_stub_query(filters=[("__key__", "=", key)]) query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL} pb = _pb_from_query(query) @@ -1142,9 +1230,17 @@ def _make_stub_query( class _Client(object): - def __init__(self, project, datastore_api=None, namespace=None, transaction=None): + def __init__( + self, + project, + datastore_api=None, + namespace=None, + transaction=None, + database=None, + ): self.project = project self._datastore_api = datastore_api + self.database = database self.namespace = namespace self._transaction = transaction @@ -1165,15 +1261,16 @@ def _make_iterator(*args, **kw): return Iterator(*args, **kw) -def _make_client(): - return _Client(_PROJECT) +def _make_client(database=None): + return _Client(_PROJECT, database=database) -def _make_entity(kind, id_, project): +def 
_make_entity(kind, id_, project, database=None): from google.cloud.datastore_v1.types import entity as entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project + key.partition_id.database_id = database elem = key.path._pb.add() elem.kind = kind elem.id = id_ diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 178bb4f17277..23574ef4cabd 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -15,16 +15,20 @@ import mock import pytest +from google.cloud.datastore.helpers import set_database_id_to_request -def test_transaction_ctor_defaults(): + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_ctor_defaults(database_id): from google.cloud.datastore.transaction import Transaction project = "PROJECT" - client = _Client(project) + client = _Client(project, database=database_id) xact = _make_transaction(client) assert xact.project == project + assert xact.database == database_id assert xact._client is client assert xact.id is None assert xact._status == Transaction._INITIAL @@ -32,53 +36,59 @@ def test_transaction_ctor_defaults(): assert len(xact._partial_key_entities) == 0 -def test_transaction_constructor_read_only(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_constructor_read_only(database_id): project = "PROJECT" id_ = 850302 ds_api = _make_datastore_api(xact=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) options = _make_options(read_only=True) xact = _make_transaction(client, read_only=True) assert xact._options == options + assert xact.database == database_id -def test_transaction_constructor_w_read_time(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_constructor_w_read_time(database_id): from datetime import datetime project = "PROJECT" id_ = 850302 read_time = datetime.utcfromtimestamp(1641058200.123456) ds_api = _make_datastore_api(xact=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) options = _make_options(read_only=True, read_time=read_time) xact = _make_transaction(client, read_only=True, read_time=read_time) assert xact._options == options + assert xact.database == database_id -def test_transaction_constructor_read_write_w_read_time(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_constructor_read_write_w_read_time(database_id): from datetime import datetime project = "PROJECT" id_ = 850302 read_time = datetime.utcfromtimestamp(1641058200.123456) ds_api = _make_datastore_api(xact=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) with pytest.raises(ValueError): _make_transaction(client, read_only=False, read_time=read_time) -def test_transaction_current(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_current(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" id_ = 678 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, database=database_id, datastore_api=ds_api) xact1 = _make_transaction(client) xact2 = _make_transaction(client) assert xact1.current() is None @@ 
-108,87 +118,97 @@ def test_transaction_current(): begin_txn = ds_api.begin_transaction assert begin_txn.call_count == 2 - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) begin_txn.assert_called_with(request=expected_request) commit_method = ds_api.commit assert commit_method.call_count == 2 mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL - commit_method.assert_called_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": id_, - } - ) + expected_request = { + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": id_, + } + set_database_id_to_request(expected_request, database_id) + + commit_method.assert_called_with(request=expected_request) ds_api.rollback.assert_not_called() -def test_transaction_begin(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_begin(database_id): project = "PROJECT" id_ = 889 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, database=database_id, datastore_api=ds_api) xact = _make_transaction(client) xact.begin() assert xact.id == id_ - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) + ds_api.begin_transaction.assert_called_once_with(request=expected_request) -def test_transaction_begin_w_readonly(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_begin_w_readonly(database_id): project = "PROJECT" id_ = 889 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client, read_only=True) xact.begin() assert xact.id == id_ - expected_request = _make_begin_request(project, read_only=True) + expected_request = _make_begin_request( + project, read_only=True, database=database_id + ) ds_api.begin_transaction.assert_called_once_with(request=expected_request) -def test_transaction_begin_w_read_time(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_begin_w_read_time(database_id): from datetime import datetime project = "PROJECT" id_ = 889 read_time = datetime.utcfromtimestamp(1641058200.123456) ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client, read_only=True, read_time=read_time) xact.begin() assert xact.id == id_ - expected_request = _make_begin_request(project, read_only=True, read_time=read_time) + expected_request = _make_begin_request( + project, read_only=True, read_time=read_time, database=database_id + ) ds_api.begin_transaction.assert_called_once_with(request=expected_request) -def test_transaction_begin_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_begin_w_retry_w_timeout(database_id): project = "PROJECT" id_ = 889 retry = mock.Mock() timeout = 100000 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) xact.begin(retry=retry, timeout=timeout) assert xact.id == id_ - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) 
ds_api.begin_transaction.assert_called_once_with( request=expected_request, retry=retry, @@ -196,37 +216,38 @@ def test_transaction_begin_w_retry_w_timeout(): ) -def test_transaction_begin_tombstoned(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_begin_tombstoned(database_id): project = "PROJECT" id_ = 1094 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) xact.begin() assert xact.id == id_ - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) ds_api.begin_transaction.assert_called_once_with(request=expected_request) xact.rollback() - - client._datastore_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) + expected_request = {"project_id": project, "transaction": id_} + set_database_id_to_request(expected_request, database_id) + client._datastore_api.rollback.assert_called_once_with(request=expected_request) assert xact.id is None with pytest.raises(ValueError): xact.begin() -def test_transaction_begin_w_begin_transaction_failure(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_begin_w_begin_transaction_failure(database_id): project = "PROJECT" id_ = 712 ds_api = _make_datastore_api(xact_id=id_) ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[]) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) with pytest.raises(RuntimeError): @@ -234,48 +255,54 @@ def test_transaction_begin_w_begin_transaction_failure(): assert xact.id is None - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) ds_api.begin_transaction.assert_called_once_with(request=expected_request) -def test_transaction_rollback(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_rollback(database_id): project = "PROJECT" id_ = 239 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) xact.begin() xact.rollback() assert xact.id is None - ds_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) + expected_request = {"project_id": project, "transaction": id_} + set_database_id_to_request(expected_request, database_id) + ds_api.rollback.assert_called_once_with(request=expected_request) -def test_transaction_rollback_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_rollback_w_retry_w_timeout(database_id): project = "PROJECT" id_ = 239 retry = mock.Mock() timeout = 100000 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) xact.begin() xact.rollback(retry=retry, timeout=timeout) assert xact.id is None + expected_request = {"project_id": project, "transaction": id_} + set_database_id_to_request(expected_request, database_id) + ds_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_}, + request=expected_request, retry=retry, timeout=timeout, ) -def 
test_transaction_commit_no_partial_keys(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_commit_no_partial_keys(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" @@ -283,50 +310,53 @@ def test_transaction_commit_no_partial_keys(): mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, database=database_id, datastore_api=ds_api) xact = _make_transaction(client) xact.begin() xact.commit() - ds_api.commit.assert_called_once_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": id_, - } - ) + expected_request = { + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": id_, + } + set_database_id_to_request(expected_request, database_id) + ds_api.commit.assert_called_once_with(request=expected_request) assert xact.id is None -def test_transaction_commit_w_partial_keys_w_retry_w_timeout(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_commit_w_partial_keys_w_retry_w_timeout(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" kind = "KIND" id1 = 123 mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL - key = _make_key(kind, id1, project) + key = _make_key(kind, id1, project, database=database_id) id2 = 234 retry = mock.Mock() timeout = 100000 ds_api = _make_datastore_api(key, xact_id=id2) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) xact.begin() - entity = _Entity() + entity = _Entity(database=database_id) xact.put(entity) xact.commit(retry=retry, timeout=timeout) + expected_request = { + "project_id": project, + "mode": mode, + "mutations": xact.mutations, + "transaction": id2, + } + set_database_id_to_request(expected_request, database_id) ds_api.commit.assert_called_once_with( - request={ - "project_id": project, - "mode": mode, - "mutations": xact.mutations, - "transaction": id2, - }, + request=expected_request, retry=retry, timeout=timeout, ) @@ -334,13 +364,14 @@ def test_transaction_commit_w_partial_keys_w_retry_w_timeout(): assert entity.key.path == [{"kind": kind, "id": id1}] -def test_transaction_context_manager_no_raise(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_context_manager_no_raise(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 project = "PROJECT" id_ = 912830 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) with xact: @@ -349,28 +380,32 @@ def test_transaction_context_manager_no_raise(): assert xact.id is None - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) ds_api.begin_transaction.assert_called_once_with(request=expected_request) mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL + expected_request = { + "project_id": project, + "mode": mode, + "mutations": [], + "transaction": id_, + } + set_database_id_to_request(expected_request, database_id) + client._datastore_api.commit.assert_called_once_with( - request={ - "project_id": project, - "mode": mode, - "mutations": [], - "transaction": id_, - }, + 
request=expected_request, ) -def test_transaction_context_manager_w_raise(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_context_manager_w_raise(database_id): class Foo(Exception): pass project = "PROJECT" id_ = 614416 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) + client = _Client(project, datastore_api=ds_api, database=database_id) xact = _make_transaction(client) xact._mutation = object() try: @@ -382,22 +417,23 @@ class Foo(Exception): assert xact.id is None - expected_request = _make_begin_request(project) + expected_request = _make_begin_request(project, database=database_id) + set_database_id_to_request(expected_request, database_id) ds_api.begin_transaction.assert_called_once_with(request=expected_request) client._datastore_api.commit.assert_not_called() - - client._datastore_api.rollback.assert_called_once_with( - request={"project_id": project, "transaction": id_} - ) + expected_request = {"project_id": project, "transaction": id_} + set_database_id_to_request(expected_request, database_id) + client._datastore_api.rollback.assert_called_once_with(request=expected_request) -def test_transaction_put_read_only(): +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_put_read_only(database_id): project = "PROJECT" id_ = 943243 ds_api = _make_datastore_api(xact_id=id_) - client = _Client(project, datastore_api=ds_api) - entity = _Entity() + client = _Client(project, datastore_api=ds_api, database=database_id) + entity = _Entity(database=database_id) xact = _make_transaction(client, read_only=True) xact.begin() @@ -405,11 +441,12 @@ def test_transaction_put_read_only(): xact.put(entity) -def _make_key(kind, id_, project): +def _make_key(kind, id_, project, database=None): from google.cloud.datastore_v1.types import entity as entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project + key.partition_id.database_id = database elem = key._pb.path.add() elem.kind = kind elem.id = id_ @@ -417,20 +454,21 @@ def _make_key(kind, id_, project): class _Entity(dict): - def __init__(self): + def __init__(self, database=None): super(_Entity, self).__init__() from google.cloud.datastore.key import Key - self.key = Key("KIND", project="PROJECT") + self.key = Key("KIND", project="PROJECT", database=database) class _Client(object): - def __init__(self, project, datastore_api=None, namespace=None): + def __init__(self, project, datastore_api=None, namespace=None, database=None): self.project = project if datastore_api is None: datastore_api = _make_datastore_api() self._datastore_api = datastore_api self.namespace = namespace + self.database = database self._batches = [] def _push_batch(self, batch): @@ -483,12 +521,14 @@ def _make_transaction(client, **kw): return Transaction(client, **kw) -def _make_begin_request(project, read_only=False, read_time=None): +def _make_begin_request(project, read_only=False, read_time=None, database=None): expected_options = _make_options(read_only=read_only, read_time=read_time) - return { + request = { "project_id": project, "transaction_options": expected_options, } + set_database_id_to_request(request, database) + return request def _make_commit_response(*keys): From b6c1d60e97d8abd6ad42a7e7be04dad810fdb204 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 21 Jun 2023 13:53:28 -0400 Subject: [PATCH 520/611] chore(main): release 2.16.0 (#448) Co-authored-by: release-please[bot] 
<55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index f4de5340c336..7a15bc188562 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.15.2" + ".": "2.16.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 91c974ea3339..753aee345f80 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.16.0](https://github.com/googleapis/python-datastore/compare/v2.15.2...v2.16.0) (2023-06-21) + + +### Features + +* Named database support ([#439](https://github.com/googleapis/python-datastore/issues/439)) ([abf0060](https://github.com/googleapis/python-datastore/commit/abf0060980b2e444f4ec66e9779900658572317e)) + ## [2.15.2](https://github.com/googleapis/python-datastore/compare/v2.15.1...v2.15.2) (2023-05-04) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 0a2bac493697..f75debd2d65e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 31e212c0d679..a93d72c2b64e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.2" +__version__ = "2.16.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index db31fdc2ac14..a2303530d547 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index cc1c66a7b788..e08f7bb1304d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index cc1c66a7b788..e08f7bb1304d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} From f81522901e7c8ae63d165108df18cf37d4ad907e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 28 Jun 2023 12:00:45 -0400 Subject: [PATCH 521/611] chore: remove pinned Sphinx version [autoapprove] (#450) Source-Link: https://github.com/googleapis/synthtool/commit/909573ce9da2819eeb835909c795d29aea5c724e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/noxfile.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 02a4dedced74..1b3cb6c52663 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z + digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b +# created: 2023-06-27T13:04:21.96690344Z diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 84ae80a43893..02ee593caf67 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -334,10 +334,9 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", - "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 2369ef7158d18481bcafa91debaab24bdd36f05b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Jun 2023 12:26:23 -0400 Subject: [PATCH 522/611] chore: store artifacts in placer (#451) Source-Link: https://github.com/googleapis/synthtool/commit/cb960373d12d20f8dc38beee2bf884d49627165e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd Co-authored-by: Owl Bot --- .../google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-datastore/.kokoro/release/common.cfg | 9 +++++++++ packages/google-cloud-datastore/noxfile.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 1b3cb6c52663..98994f474104 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b -# created: 2023-06-27T13:04:21.96690344Z + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg index 6f858df92560..717dc3eee743 100644 --- a/packages/google-cloud-datastore/.kokoro/release/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/release/common.cfg @@ -38,3 +38,12 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. 
+action { + define_artifacts { + regex: "github/python-datastore/**/*.tar.gz" + strip_prefix: "github/python-datastore" + } +} diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 02ee593caf67..c4ee430d6f22 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -408,6 +408,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -420,7 +421,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) From aee5465ee37269c1b0bd00264d5c4840f953e85c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 15:41:25 -0400 Subject: [PATCH 523/611] fix: Add async context manager return types (#452) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Add async context manager return types chore: Mock return_value should not populate oneof message fields chore: Support snippet generation for services that only support REST transport chore: Update gapic-generator-python to v1.11.0 PiperOrigin-RevId: 545430278 Source-Link: https://github.com/googleapis/googleapis/commit/601b5326107eeb74800b426d1f9933faa233258a Source-Link: https://github.com/googleapis/googleapis-gen/commit/b3f18d0f6560a855022fd058865e7620479d7af9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjNmMThkMGY2NTYwYTg1NTAyMmZkMDU4ODY1ZTc2MjA0NzlkN2FmOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 2 +- .../datastore_v1/services/datastore/async_client.py | 2 +- .../google/cloud/datastore_v1/types/entity.py | 12 ++++++------ .../gapic/datastore_admin_v1/test_datastore_admin.py | 6 ++++-- 4 files changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index d940a0b078f6..818fb6bff91d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1272,7 +1272,7 @@ async def cancel_operation( metadata=metadata, ) - async def __aenter__(self): + async def __aenter__(self) -> "DatastoreAdminAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 84c963d05adf..4e0bf5352b9f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1363,7 +1363,7 @@ async def cancel_operation( metadata=metadata, ) - async def __aenter__(self): + async def __aenter__(self) -> "DatastoreAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git 
a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index ed66e490b1d1..9fd055b73ec3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -38,11 +38,11 @@ class PartitionId(proto.Message): r"""A partition ID identifies a grouping of entities. The grouping is - always by project. database. and namespace, however the namespace ID may be - empty. Empty database ID refers to the default database. + always by project and namespace, however the namespace ID may be + empty. - A partition ID contains several dimensions: project ID, database ID, - and namespace ID. + A partition ID contains several dimensions: project ID and namespace + ID. Partition dimensions: @@ -54,7 +54,7 @@ class PartitionId(proto.Message): ID is forbidden in certain documented contexts. Foreign partition IDs (in which the project ID does not match the - context project ID) are discouraged. Reads and writes of foreign + context project ID ) are discouraged. Reads and writes of foreign partition IDs may fail if the project is not in an active state. Attributes: @@ -63,7 +63,7 @@ class PartitionId(proto.Message): belong. database_id (str): If not empty, the ID of the database to which - the entities belong. Empty corresponds to the default database. + the entities belong. namespace_id (str): If not empty, the ID of the namespace to which the entities belong. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 8e9b37d08065..47d3153ec62f 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -2016,9 +2016,11 @@ async def test_list_indexes_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_indexes(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token From e96cb29cc1fe88a2ea61e4c0d9cb989e1ca2047e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 5 Jul 2023 16:45:11 +0200 Subject: [PATCH 524/611] chore(deps): update all dependencies (#449) Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index d700e917ed72..f8841e8c6af2 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.3.2 
+pytest==7.4.0 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index d0195bcdbdf6..fcddcf53ddee 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.15.2 \ No newline at end of file +google-cloud-datastore==2.16.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index 28706bebc1f8..6950eb5a7b6a 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.2 \ No newline at end of file +pytest==7.4.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index ff812cc4f0c5..4c89b8f03eb8 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.15.2 +google-cloud-datastore==2.16.0 From 7fbbabdcd914ab45e6126cc1031f5157a6fd69c2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:56:33 -0400 Subject: [PATCH 525/611] chore(main): release 2.16.1 (#453) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index 7a15bc188562..f122d158c5e8 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.16.0" + ".": "2.16.1" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 753aee345f80..907be34a4111 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.16.1](https://github.com/googleapis/python-datastore/compare/v2.16.0...v2.16.1) (2023-07-05) + + +### Bug Fixes + +* Add async context manager return types ([#452](https://github.com/googleapis/python-datastore/issues/452)) ([05f20dc](https://github.com/googleapis/python-datastore/commit/05f20dc54a100413ff7db2376ba353311584e4c8)) + ## [2.16.0](https://github.com/googleapis/python-datastore/compare/v2.15.2...v2.16.0) (2023-06-21) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py 
b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index f75debd2d65e..da0da6723fc2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index a93d72c2b64e..61e0c0a8360c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.16.0" +__version__ = "2.16.1" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index a2303530d547..8edfaef7141c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index e08f7bb1304d..8a60bdde448c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index e08f7bb1304d..8a60bdde448c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.16.1" # {x-release-please-version} From f1ea8eea669b058ae73d44a7eb4e7a735cf02ec6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 5 Jul 2023 21:22:31 +0200 Subject: [PATCH 526/611] chore(deps): update dependency google-cloud-datastore to v2.16.1 (#454) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index fcddcf53ddee..9c65e4996e43 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.16.0 \ No newline at end of file +google-cloud-datastore==2.16.1 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 4c89b8f03eb8..4a4657a1ac18 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.16.0 +google-cloud-datastore==2.16.1 From ea6fb13ab0eca51546b9b9825d7a56f8c36a4fdf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 09:09:28 -0400 Subject: [PATCH 527/611] chore: Update gapic-generator-python to v1.11.2 (#455) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.2 PiperOrigin-RevId: 546510849 Source-Link: https://github.com/googleapis/googleapis/commit/736073ad9a9763a170eceaaa54519bcc0ea55a5e Source-Link: https://github.com/googleapis/googleapis-gen/commit/deb64e8ec19d141e31089fe932b3a997ad541c4d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGViNjRlOGVjMTlkMTQxZTMxMDg5ZmU5MzJiM2E5OTdhZDU0MWM0ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/datastore_admin/__init__.py | 2 +- .../google/cloud/datastore_admin_v1/__init__.py | 2 +- .../google/cloud/datastore_admin_v1/services/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/async_client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/pagers.py | 2 +- .../services/datastore_admin/transports/__init__.py | 2 +- .../services/datastore_admin/transports/base.py | 2 +- .../services/datastore_admin/transports/grpc.py | 2 +- .../services/datastore_admin/transports/grpc_asyncio.py | 2 +- .../services/datastore_admin/transports/rest.py | 2 +- .../google/cloud/datastore_admin_v1/types/__init__.py | 2 +- .../google/cloud/datastore_admin_v1/types/datastore_admin.py | 2 +- .../google/cloud/datastore_admin_v1/types/index.py | 2 +- .../google/cloud/datastore_admin_v1/types/migration.py | 2 +- .../google/cloud/datastore_v1/__init__.py | 2 +- .../google/cloud/datastore_v1/services/__init__.py | 2 +- .../google/cloud/datastore_v1/services/datastore/__init__.py | 2 +- 
.../cloud/datastore_v1/services/datastore/async_client.py | 2 +- .../google/cloud/datastore_v1/services/datastore/client.py | 2 +- .../datastore_v1/services/datastore/transports/__init__.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/base.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/grpc.py | 2 +- .../datastore_v1/services/datastore/transports/grpc_asyncio.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/rest.py | 2 +- .../google/cloud/datastore_v1/types/__init__.py | 2 +- .../google/cloud/datastore_v1/types/aggregation_result.py | 2 +- .../google/cloud/datastore_v1/types/datastore.py | 2 +- .../google/cloud/datastore_v1/types/entity.py | 2 +- .../google/cloud/datastore_v1/types/query.py | 2 +- .../scripts/fixup_datastore_admin_v1_keywords.py | 2 +- .../scripts/fixup_datastore_v1_keywords.py | 2 +- packages/google-cloud-datastore/tests/__init__.py | 2 +- packages/google-cloud-datastore/tests/unit/__init__.py | 2 +- packages/google-cloud-datastore/tests/unit/gapic/__init__.py | 2 +- .../tests/unit/gapic/datastore_admin_v1/__init__.py | 2 +- .../tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py | 2 +- .../tests/unit/gapic/datastore_v1/__init__.py | 2 +- .../tests/unit/gapic/datastore_v1/test_datastore.py | 2 +- 40 files changed, 40 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py index 6e5cd0cd98b8..17ff436e52f9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index d2b8b534c66b..b909402ef504 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index 6e5bb3d16f6b..bbf2af15324b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 818fb6bff91d..2dd8c9880087 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index f160915dfb10..9d2ee237d7f3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index 9a2d05ad7796..139e1983967f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index e31fda239e8d..d1930caaaf71 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index bd0b94b4aef8..83b6167e56f4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index ce244dbcad26..79815b25b820 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 1519b53f7f3b..3f1d638e5940 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 41aea81197e9..eb0f59ef9e66 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py index f194f3cf3208..7ba786ca6db5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 8b40de02c6bf..103d2563c3e6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index 7966ab118517..c70a13dbe5cb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index 773c4238fb59..12e933592df4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 05159e6877c8..b494905bdb04 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py index 66d6560d859a..a93070e267de 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 4e0bf5352b9f..1fcbf88f1bc8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 480da31c83c9..2bb647a69f0c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 201364063c0f..911cdd46a9cc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 1e7a3c9416bb..54cbd8c473b9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 622f217cea98..27e578081af4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 08b1e5f27b78..4baefa382ff0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index e1bced6ef0c2..a5086d560016 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index ca7dae214ce3..b6ff2a444e92 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index cf8e8339abf7..8bcb83711af1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 2512ad890890..b04b73c51a75 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 9fd055b73ec3..4ab5bc6d9892 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index afa05d19b82b..5fb51d46693a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py index 49b96026dac4..44ae7ecb85c3 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 77b0527b8a12..4a8be1c96b8b 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/__init__.py b/packages/google-cloud-datastore/tests/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/tests/__init__.py +++ b/packages/google-cloud-datastore/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/tests/unit/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 47d3153ec62f..557238a07e57 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 2a89f4e4ac60..9d99308cfa81 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 5718ced9a277889be9cb2d47bdfdf7b0fe8e821d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 11:55:20 -0400 Subject: [PATCH 528/611] build(deps): [autoapprove] bump cryptography from 41.0.0 to 41.0.2 (#458) Source-Link: https://github.com/googleapis/synthtool/commit/d6103f4a3540ba60f633a9e25c37ec5fe7e6286d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.flake8 | 2 +- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/auto-label.yaml | 2 +- .../google-cloud-datastore/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- .../.kokoro/publish-docs.sh | 2 +- .../google-cloud-datastore/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 44 ++++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- .../.kokoro/test-samples.sh | 2 +- .../.kokoro/trampoline.sh | 2 +- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-datastore/.trampolinerc | 4 +- packages/google-cloud-datastore/MANIFEST.in | 2 +- packages/google-cloud-datastore/README.rst | 27 ++++++------ packages/google-cloud-datastore/docs/conf.py | 2 +- packages/google-cloud-datastore/noxfile.py | 3 +- .../scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 18 ++++---- packages/google-cloud-datastore/setup.cfg | 2 +- 23 files changed, 69 insertions(+), 65 deletions(-) diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 98994f474104..ae4a522b9e5f 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb +# created: 2023-07-17T15:20:13.819193964Z diff --git a/packages/google-cloud-datastore/.github/auto-label.yaml b/packages/google-cloud-datastore/.github/auto-label.yaml index 41bff0b5375a..b2016d119b40 100644 --- a/packages/google-cloud-datastore/.github/auto-label.yaml +++ b/packages/google-cloud-datastore/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index 3ca0a226f8cc..3795bde04965 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index f8137d0ae497..8e39a2cc438d 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh index f52514257ef0..6f3972140e80 100755 --- a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh +++ b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index 1c4d62370042..9eafe0be3bba 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index 0235f8de618f..c236e3cf0913 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index c7929db6d152..67d70a110897 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + 
--hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # gcp-releasetool # secretstorage diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh index ba3a707b040c..63ac41dfae1d 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh index 2c6500cae0b9..5a0f5fab6a89 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh index 11c042d342d7..50b35a48c190 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/trampoline.sh b/packages/google-cloud-datastore/.kokoro/trampoline.sh index f39236e943a8..d85b1f267693 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..59a7cf3a9373 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml
index 5405cc8ff1f3..9e3898fd1c12 100644
--- a/packages/google-cloud-datastore/.pre-commit-config.yaml
+++ b/packages/google-cloud-datastore/.pre-commit-config.yaml
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/.trampolinerc b/packages/google-cloud-datastore/.trampolinerc
index 0eee72ab62aa..a7dfeb42c6d0 100644
--- a/packages/google-cloud-datastore/.trampolinerc
+++ b/packages/google-cloud-datastore/.trampolinerc
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,8 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# Template for .trampolinerc
-
 # Add required env vars here.
 required_envvars+=(
 )
diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in
index e783f4c6209b..e0a66705318e 100644
--- a/packages/google-cloud-datastore/MANIFEST.in
+++ b/packages/google-cloud-datastore/MANIFEST.in
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright 2020 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst
index 392bfd8c0927..0d5b7f269581 100644
--- a/packages/google-cloud-datastore/README.rst
+++ b/packages/google-cloud-datastore/README.rst
@@ -40,21 +40,24 @@ In order to use this library, you first need to go through the following steps:
 Installation
 ~~~~~~~~~~~~
 
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
 
-With `virtualenv`_, it's possible to install this library without needing system
+With `venv`_, it's possible to install this library without needing system
 install permissions, and without clashing with the installed system
 dependencies.
 
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+.. _`venv`: https://docs.python.org/3/library/venv.html
 
 
 Code samples and snippets
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Code samples and snippets live in the `samples/` folder.
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/python-datastore/tree/main/samples
 
 
 Supported Python Versions
@@ -81,10 +84,9 @@ Mac/Linux
 
 .. code-block:: console
 
-    pip install virtualenv
-    virtualenv <your-env>
+    python3 -m venv <your-env>
     source <your-env>/bin/activate
-    <your-env>/bin/pip install google-cloud-datastore
+    pip install google-cloud-datastore
 
 
 Windows
 ^^^^^^^
 .. code-block:: console
 
-    pip install virtualenv
-    virtualenv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install google-cloud-datastore
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-datastore
 
 Next Steps
 ~~~~~~~~~~
diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py
index febe857a9da4..3e814f57e6f1 100644
--- a/packages/google-cloud-datastore/docs/conf.py
+++ b/packages/google-cloud-datastore/docs/conf.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2021 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py
index c4ee430d6f22..953704325f88 100644
--- a/packages/google-cloud-datastore/noxfile.py
+++ b/packages/google-cloud-datastore/noxfile.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright 2018 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -429,6 +429,7 @@ def prerelease_deps(session):
         "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
     )
     session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
 
     session.run("py.test", "tests/unit")
diff --git a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh
index 21f6d2a26d90..0018b421ddf8 100755
--- a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh
+++ b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-# Copyright 2015 Google Inc. All rights reserved.
+# Copyright 2023 Google LLC All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py
index 91b59676bfc7..1acc119835b5 100644
--- a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py
+++ b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright 2016 Google Inc
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -33,17 +33,17 @@
     autoescape=True,
 )
 
-README_TMPL = jinja_env.get_template('README.tmpl.rst')
+README_TMPL = jinja_env.get_template("README.tmpl.rst")
 
 
 def get_help(file):
-    return subprocess.check_output(['python', file, '--help']).decode()
+    return subprocess.check_output(["python", file, "--help"]).decode()
 
 
 def main():
     parser = argparse.ArgumentParser()
-    parser.add_argument('source')
-    parser.add_argument('--destination', default='README.rst')
+    parser.add_argument("source")
+    parser.add_argument("--destination", default="README.rst")
 
     args = parser.parse_args()
 
@@ -51,9 +51,9 @@ def main():
     root = os.path.dirname(source)
     destination = os.path.join(root, args.destination)
 
-    jinja_env.globals['get_help'] = get_help
+    jinja_env.globals["get_help"] = get_help
 
-    with io.open(source, 'r') as f:
+    with io.open(source, "r") as f:
         config = yaml.load(f)
 
     # This allows get_help to execute in the right directory.
@@ -61,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/packages/google-cloud-datastore/setup.cfg b/packages/google-cloud-datastore/setup.cfg index c3a2b39f6528..052350089505 100644 --- a/packages/google-cloud-datastore/setup.cfg +++ b/packages/google-cloud-datastore/setup.cfg @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From ec671aa71b654075c38acf5cd2c972bda1f6de99 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 09:31:44 -0400 Subject: [PATCH 529/611] build(deps): [autoapprove] bump pygments from 2.13.0 to 2.15.0 (#461) Source-Link: https://github.com/googleapis/synthtool/commit/eaef28efd179e6eeb9f4e9bf697530d074a6f3b9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index ae4a522b9e5f..17c21d96d654 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb -# created: 2023-07-17T15:20:13.819193964Z + digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e +# created: 2023-07-21T02:12:46.49799314Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 67d70a110897..b563eb284459 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -396,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich From 212934629f29ace82882b26f864428822d2d1c56 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 07:03:42 -0400 Subject: [PATCH 530/611] build(deps): [autoapprove] bump certifi from 2022.12.7 to 2023.7.22 (#463) Source-Link: https://github.com/googleapis/synthtool/commit/395d53adeeacfca00b73abf197f65f3c17c8f1e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 17c21d96d654..0ddd0e4d1873 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e -# created: 2023-07-21T02:12:46.49799314Z + digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 +# created: 2023-07-25T21:01:10.396410762Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index b563eb284459..76d9bba0f7d0 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From 69f8dd0b7acc968cf104d6aa65ee431dc35ea7de Mon Sep 17 00:00:00 2001 From: "Bob \"Wombat\" Hogg" Date: Tue, 1 Aug 2023 13:54:54 -0400 Subject: [PATCH 531/611] chore: Fix linter errors (use isinstance instead of type) (#464) --- .../google-cloud-datastore/google/cloud/datastore/client.py | 2 +- packages/google-cloud-datastore/tests/unit/test_aggregation.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index fe25a0e05d77..3f5041d622f5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -334,7 +334,7 @@ def __init__( else: api_endpoint = _DATASTORE_BASE_URL if client_options: - if type(client_options) == dict: + if isinstance(client_options, dict): client_options = google.api_core.client_options.from_dict( client_options ) diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py index ebfa9a3f67c3..fd72ad0a0eb5 100644 --- a/packages/google-cloud-datastore/tests/unit/test_aggregation.py +++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py @@ -420,7 +420,7 @@ def test__item_to_aggregation_result(): result = _item_to_aggregation_result(None, map_composite_mock) assert len(result) == 1 - assert type(result[0]) == AggregationResult + assert isinstance(result[0], AggregationResult) assert result[0].alias == "total" assert result[0].value == map_composite_mock.__getitem__().integer_value From f0839b7af2fd8d3f10818c4b1354634d988588af Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Aug 2023 10:38:01 -0400 Subject: [PATCH 532/611] build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 (#467) Source-Link: https://github.com/googleapis/synthtool/commit/352b9d4c068ce7c05908172af128b294073bf53c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 Co-authored-by: Owl Bot --- 
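Note on the isinstance change in PATCH 531 above: flake8's E721 check flags comparisons like ``type(x) == dict`` because an exact-type comparison silently rejects subclasses, whereas ``isinstance(x, dict)`` follows inheritance and also accepts a tuple of types. A minimal sketch of the difference (the ``OptionsDict`` class is hypothetical, used only for illustration):

    class OptionsDict(dict):
        """Hypothetical dict subclass, standing in for user-supplied client options."""

    opts = OptionsDict()
    print(type(opts) == dict)      # False: exact-type comparison misses the subclass (flake8 E721)
    print(isinstance(opts, dict))  # True: isinstance matches dict and any of its subclasses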
.../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 48 +++++++++---------- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-datastore/noxfile.py | 3 +- 4 files changed, 29 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 0ddd0e4d1873..a3da1b0d4cd3 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 -# created: 2023-07-25T21:01:10.396410762Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 76d9bba0f7d0..029bd342de94 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + 
--hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml index 9e3898fd1c12..19409cbd37a4 100644 --- a/packages/google-cloud-datastore/.pre-commit-config.yaml +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 + rev: 6.1.0 hooks: - id: flake8 diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 953704325f88..99df26d9ac61 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -25,6 +25,7 @@ import nox +FLAKE8_VERSION = "flake8==6.1.0" BLACK_VERSION = "black==22.3.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -83,7 +84,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. 
""" - session.install("flake8", BLACK_VERSION) + session.install(FLAKE8_VERSION, BLACK_VERSION) session.run( "black", "--check", From 06cb0897930e0360d3388a014a873265a495c782 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 9 Aug 2023 06:38:23 -0400 Subject: [PATCH 533/611] feat: publish proto definitions for SUM/AVG in Datastore (#456) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.3 PiperOrigin-RevId: 546899192 Source-Link: https://github.com/googleapis/googleapis/commit/e6b16918b98fe1a35f725b56537354f22b6cdc48 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0b3917c421cbda7fcb67092e16c33f3ea46f4bc7 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGIzOTE3YzQyMWNiZGE3ZmNiNjcwOTJlMTZjMzNmM2VhNDZmNGJjNyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore(python): Add `warehouse-package-name` for google/datastore/admin/v1 PiperOrigin-RevId: 551158666 Source-Link: https://github.com/googleapis/googleapis/commit/5096eae6a449e0ee66b68cfd2634cac97b682e88 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a0e6a6ca362c8521ab301408db389dd8235ca073 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTBlNmE2Y2EzNjJjODUyMWFiMzAxNDA4ZGIzODlkZDgyMzVjYTA3MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: Specify limit for `properties` in `Index` message in Datastore Admin API docs: Minor formatting in Datastore Admin API PiperOrigin-RevId: 551819875 Source-Link: https://github.com/googleapis/googleapis/commit/3db8b015f79d0d2b495c40ad6f0320e9d88c5275 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7ebf1137639b93613e6a6206654a5ba8f21e9321 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2ViZjExMzc2MzliOTM2MTNlNmE2MjA2NjU0YTViYThmMjFlOTMyMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: publish proto definitions for SUM/AVG in Datastore PiperOrigin-RevId: 552847139 Source-Link: https://github.com/googleapis/googleapis/commit/6148e5b6f91abc8d2f6e246e9608bf92ffb408bd Source-Link: https://github.com/googleapis/googleapis-gen/commit/96be51b995047bfdef4825178539e238583101a9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTZiZTUxYjk5NTA0N2JmZGVmNDgyNTE3ODUzOWUyMzg1ODMxMDFhOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * apply docs fix from https://github.com/googleapis/gapic-generator-python/pull/1721 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/datastore_admin/py.typed | 2 +- .../google/cloud/datastore_admin_v1/py.typed | 2 +- .../services/datastore_admin/async_client.py | 89 ++++++++--------- .../services/datastore_admin/client.py | 88 ++++++++--------- .../datastore_admin/transports/grpc.py | 89 ++++++++--------- .../transports/grpc_asyncio.py | 89 ++++++++--------- .../datastore_admin/transports/rest.py | 89 ++++++++--------- .../cloud/datastore_admin_v1/types/index.py | 8 +- .../cloud/datastore_v1/types/datastore.py | 14 ++- .../google/cloud/datastore_v1/types/entity.py | 4 +- .../google/cloud/datastore_v1/types/query.py | 97 +++++++++++++++++-- 11 files changed, 317 insertions(+), 
254 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed b/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed index dc48a544f252..e82a93198619 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/py.typed @@ -1,2 +1,2 @@ # Marker file for PEP 561. -# The google-cloud-datastore-admin package uses inline types. +# The google-cloud-datastore package uses inline types. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed index dc48a544f252..e82a93198619 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/py.typed @@ -1,2 +1,2 @@ # Marker file for PEP 561. -# The google-cloud-datastore-admin package uses inline types. +# The google-cloud-datastore package uses inline types. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 2dd8c9880087..fb112508a9e4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -58,11 +58,8 @@ class DatastoreAdminAsyncClient: """Google Cloud Datastore Admin API The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google - Cloud Datastore API. + Concepts: Project, namespace, kind, and entity as defined in the + Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. @@ -70,49 +67,45 @@ class DatastoreAdminAsyncClient: project. This is specified as a combination of kinds and namespaces (either or both of which may be all). - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a - subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one project - and then import into another. - Exported data can also be loaded into Google BigQuery for - analysis. - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state (including - any errors encountered) of the export/import may be queried via - the Operation resource. - # Index - - The index service manages Cloud Datastore composite indexes. - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed - for the specified project (including any operations in - progress). Operations are not created directly but through calls - on other collections or resources. 
- An operation that is not yet done may be cancelled. The request - to cancel is asynchronous and the operation may continue to run - for some time after the request to cancel is made. - - An operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. - - ListOperations returns all pending operations, but not completed - operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. + Export/Import Service: + + - The Export/Import service provides the ability to copy all or + a subset of entities to/from Google Cloud Storage. + - Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one + project and then import into another. + - Exported data can also be loaded into Google BigQuery for + analysis. + - Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state + (including any errors encountered) of the export/import may be + queried via the Operation resource. + + Index Service: + + - The index service manages Cloud Datastore composite indexes. + - Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + Operation Service: + + - The Operations collection provides a record of actions + performed for the specified project (including any operations + in progress). Operations are not created directly but through + calls on other collections or resources. + - An operation that is not yet done may be cancelled. The + request to cancel is asynchronous and the operation may + continue to run for some time after the request to cancel is + made. + - An operation that is done may be deleted so that it is no + longer listed as part of the Operation collection. + - ListOperations returns all pending operations, but not + completed operations. + - Operations are created by service DatastoreAdmin, but are + accessed via service google.longrunning.Operations. """ _client: DatastoreAdminClient diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 9d2ee237d7f3..fabdcecf8593 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -100,58 +100,54 @@ class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta): """Google Cloud Datastore Admin API The Datastore Admin API provides several admin services for Cloud Datastore. - ## Concepts - - Project, namespace, kind, and entity as defined in the Google - Cloud Datastore API. + Concepts: Project, namespace, kind, and entity as defined in the + Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a - subset of entities to/from Google Cloud Storage. 
- - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one project - and then import into another. - Exported data can also be loaded into Google BigQuery for - analysis. - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state (including - any errors encountered) of the export/import may be queried via - the Operation resource. - # Index - - The index service manages Cloud Datastore composite indexes. - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed - for the specified project (including any operations in - progress). Operations are not created directly but through calls - on other collections or resources. - An operation that is not yet done may be cancelled. The request - to cancel is asynchronous and the operation may continue to run - for some time after the request to cancel is made. - - An operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. - - ListOperations returns all pending operations, but not completed - operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. + + Export/Import Service: + + - The Export/Import service provides the ability to copy all or + a subset of entities to/from Google Cloud Storage. + - Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one + project and then import into another. + - Exported data can also be loaded into Google BigQuery for + analysis. + - Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state + (including any errors encountered) of the export/import may be + queried via the Operation resource. + + Index Service: + + - The index service manages Cloud Datastore composite indexes. + - Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + Operation Service: + + - The Operations collection provides a record of actions + performed for the specified project (including any operations + in progress). Operations are not created directly but through + calls on other collections or resources. + - An operation that is not yet done may be cancelled. The + request to cancel is asynchronous and the operation may + continue to run for some time after the request to cancel is + made. + - An operation that is done may be deleted so that it is no + longer listed as part of the Operation collection. + - ListOperations returns all pending operations, but not + completed operations. + - Operations are created by service DatastoreAdmin, but are + accessed via service google.longrunning.Operations. 
""" @staticmethod diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 79815b25b820..24ca17c761cb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -38,11 +38,8 @@ class DatastoreAdminGrpcTransport(DatastoreAdminTransport): Google Cloud Datastore Admin API The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google - Cloud Datastore API. + Concepts: Project, namespace, kind, and entity as defined in the + Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. @@ -50,49 +47,45 @@ class DatastoreAdminGrpcTransport(DatastoreAdminTransport): project. This is specified as a combination of kinds and namespaces (either or both of which may be all). - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a - subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one project - and then import into another. - Exported data can also be loaded into Google BigQuery for - analysis. - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state (including - any errors encountered) of the export/import may be queried via - the Operation resource. - # Index - - The index service manages Cloud Datastore composite indexes. - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed - for the specified project (including any operations in - progress). Operations are not created directly but through calls - on other collections or resources. - An operation that is not yet done may be cancelled. The request - to cancel is asynchronous and the operation may continue to run - for some time after the request to cancel is made. - - An operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. - - ListOperations returns all pending operations, but not completed - operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. + Export/Import Service: + + - The Export/Import service provides the ability to copy all or + a subset of entities to/from Google Cloud Storage. + - Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one + project and then import into another. + - Exported data can also be loaded into Google BigQuery for + analysis. 
+ - Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state + (including any errors encountered) of the export/import may be + queried via the Operation resource. + + Index Service: + + - The index service manages Cloud Datastore composite indexes. + - Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + Operation Service: + + - The Operations collection provides a record of actions + performed for the specified project (including any operations + in progress). Operations are not created directly but through + calls on other collections or resources. + - An operation that is not yet done may be cancelled. The + request to cancel is asynchronous and the operation may + continue to run for some time after the request to cancel is + made. + - An operation that is done may be deleted so that it is no + longer listed as part of the Operation collection. + - ListOperations returns all pending operations, but not + completed operations. + - Operations are created by service DatastoreAdmin, but are + accessed via service google.longrunning.Operations. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 3f1d638e5940..9c37e0745e7a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -39,11 +39,8 @@ class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): Google Cloud Datastore Admin API The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google - Cloud Datastore API. + Concepts: Project, namespace, kind, and entity as defined in the + Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. @@ -51,49 +48,45 @@ class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): project. This is specified as a combination of kinds and namespaces (either or both of which may be all). - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a - subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one project - and then import into another. - Exported data can also be loaded into Google BigQuery for - analysis. - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state (including - any errors encountered) of the export/import may be queried via - the Operation resource. - # Index - - The index service manages Cloud Datastore composite indexes. 
- Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed - for the specified project (including any operations in - progress). Operations are not created directly but through calls - on other collections or resources. - An operation that is not yet done may be cancelled. The request - to cancel is asynchronous and the operation may continue to run - for some time after the request to cancel is made. - - An operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. - - ListOperations returns all pending operations, but not completed - operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. + Export/Import Service: + + - The Export/Import service provides the ability to copy all or + a subset of entities to/from Google Cloud Storage. + - Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one + project and then import into another. + - Exported data can also be loaded into Google BigQuery for + analysis. + - Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state + (including any errors encountered) of the export/import may be + queried via the Operation resource. + + Index Service: + + - The index service manages Cloud Datastore composite indexes. + - Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + Operation Service: + + - The Operations collection provides a record of actions + performed for the specified project (including any operations + in progress). Operations are not created directly but through + calls on other collections or resources. + - An operation that is not yet done may be cancelled. The + request to cancel is asynchronous and the operation may + continue to run for some time after the request to cancel is + made. + - An operation that is done may be deleted so that it is no + longer listed as part of the Operation collection. + - ListOperations returns all pending operations, but not + completed operations. + - Operations are created by service DatastoreAdmin, but are + accessed via service google.longrunning.Operations. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index eb0f59ef9e66..ee7f7dc0fed6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -365,11 +365,8 @@ class DatastoreAdminRestTransport(DatastoreAdminTransport): Google Cloud Datastore Admin API The Datastore Admin API provides several admin services for Cloud Datastore. - ----------------------------------------------------------------------------- - ## Concepts - - Project, namespace, kind, and entity as defined in the Google - Cloud Datastore API. + Concepts: Project, namespace, kind, and entity as defined in the + Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. @@ -377,49 +374,45 @@ class DatastoreAdminRestTransport(DatastoreAdminTransport): project. This is specified as a combination of kinds and namespaces (either or both of which may be all). - ----------------------------------------------------------------------------- - ## Services - - # Export/Import - - The Export/Import service provides the ability to copy all or a - subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one project - and then import into another. - Exported data can also be loaded into Google BigQuery for - analysis. - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state (including - any errors encountered) of the export/import may be queried via - the Operation resource. - # Index - - The index service manages Cloud Datastore composite indexes. - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - # Operation - - The Operations collection provides a record of actions performed - for the specified project (including any operations in - progress). Operations are not created directly but through calls - on other collections or resources. - An operation that is not yet done may be cancelled. The request - to cancel is asynchronous and the operation may continue to run - for some time after the request to cancel is made. - - An operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. - - ListOperations returns all pending operations, but not completed - operations. - Operations are created by service DatastoreAdmin, - but are accessed via service google.longrunning.Operations. + Export/Import Service: + + - The Export/Import service provides the ability to copy all or + a subset of entities to/from Google Cloud Storage. + - Exported data may be imported into Cloud Datastore for any + Google Cloud Platform project. It is not restricted to the + export source project. It is possible to export from one + project and then import into another. 
+ - Exported data can also be loaded into Google BigQuery for + analysis. + - Exports and imports are performed asynchronously. An Operation + resource is created for each export/import. The state + (including any errors encountered) of the export/import may be + queried via the Operation resource. + + Index Service: + + - The index service manages Cloud Datastore composite indexes. + - Index creation and deletion are performed asynchronously. An + Operation resource is created for each such asynchronous + operation. The state of the operation (including any errors + encountered) may be queried via the Operation resource. + + Operation Service: + + - The Operations collection provides a record of actions + performed for the specified project (including any operations + in progress). Operations are not created directly but through + calls on other collections or resources. + - An operation that is not yet done may be cancelled. The + request to cancel is asynchronous and the operation may + continue to run for some time after the request to cancel is + made. + - An operation that is done may be deleted so that it is no + longer listed as part of the Operation collection. + - ListOperations returns all pending operations, but not + completed operations. + - Operations are created by service DatastoreAdmin, but are + accessed via service google.longrunning.Operations. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index c70a13dbe5cb..dfb44417e1d6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -43,8 +43,12 @@ class Index(proto.Message): Required. The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. properties (MutableSequence[google.cloud.datastore_admin_v1.types.Index.IndexedProperty]): - Required. An ordered sequence of property - names and their index attributes. + Required. An ordered sequence of property names and their + index attributes. + + Requires: + + - A maximum of 100 properties. state (google.cloud.datastore_admin_v1.types.Index.State): Output only. The state of the index. """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index b04b73c51a75..be824c9602cb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -859,9 +859,12 @@ class ReadOptions(proto.Message): This field is a member of `oneof`_ ``consistency_type``. read_time (google.protobuf.timestamp_pb2.Timestamp): Reads entities as they were at the given - time. This may not be older than 270 seconds. - This value is only supported for Cloud Firestore - in Datastore mode. + time. This value is only supported for Cloud + Firestore in Datastore mode. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. This field is a member of `oneof`_ ``consistency_type``. 
""" @@ -954,7 +957,10 @@ class ReadOnly(proto.Message): Attributes: read_time (google.protobuf.timestamp_pb2.Timestamp): Reads entities at the given time. - This may not be older than 60 seconds. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. """ read_time: timestamp_pb2.Timestamp = proto.Field( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 4ab5bc6d9892..dc3374e418b6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -265,8 +265,8 @@ class Value(proto.Message): entity_value (google.cloud.datastore_v1.types.Entity): An entity value. - May have no key. - - May have a key with an incomplete key path. - - May have a reserved/read-only key. + - May have a key with an incomplete key path. + - May have a reserved/read-only key. This field is a member of `oneof`_ ``value_type``. array_value (google.cloud.datastore_v1.types.ArrayValue): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 5fb51d46693a..67a1c58db8c1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -246,12 +246,25 @@ class AggregationQuery(proto.Message): class Aggregation(proto.Message): r"""Defines an aggregation that produces a single result. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: count (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Count): Count aggregator. + This field is a member of `oneof`_ ``operator``. + sum (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Sum): + Sum aggregator. + + This field is a member of `oneof`_ ``operator``. + avg (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Avg): + Average aggregator. + This field is a member of `oneof`_ ``operator``. alias (str): Optional. Optional name of the property to store the result @@ -328,12 +341,83 @@ class Count(proto.Message): message=wrappers_pb2.Int64Value, ) + class Sum(proto.Message): + r"""Sum of the values of the requested property. + + - Only numeric values will be aggregated. All non-numeric values + including ``NULL`` are skipped. + + - If the aggregated values contain ``NaN``, returns ``NaN``. + Infinity math follows IEEE-754 standards. + + - If the aggregated value set is empty, returns 0. + + - Returns a 64-bit integer if all aggregated numbers are integers + and the sum result does not overflow. Otherwise, the result is + returned as a double. Note that even if all the aggregated values + are integers, the result is returned as a double if it cannot fit + within a 64-bit signed integer. When this occurs, the returned + value will lose precision. + + - When underflow occurs, floating-point aggregation is + non-deterministic. 
This means that running the same query + repeatedly without any changes to the underlying values could + produce slightly different results each time. In those cases, + values should be stored as integers over floating-point numbers. + + Attributes: + property (google.cloud.datastore_v1.types.PropertyReference): + The property to aggregate on. + """ + + property: "PropertyReference" = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + + class Avg(proto.Message): + r"""Average of the values of the requested property. + + - Only numeric values will be aggregated. All non-numeric values + including ``NULL`` are skipped. + + - If the aggregated values contain ``NaN``, returns ``NaN``. + Infinity math follows IEEE-754 standards. + + - If the aggregated value set is empty, returns ``NULL``. + + - Always returns the result as a double. + + Attributes: + property (google.cloud.datastore_v1.types.PropertyReference): + The property to aggregate on. + """ + + property: "PropertyReference" = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + count: "AggregationQuery.Aggregation.Count" = proto.Field( proto.MESSAGE, number=1, oneof="operator", message="AggregationQuery.Aggregation.Count", ) + sum: "AggregationQuery.Aggregation.Sum" = proto.Field( + proto.MESSAGE, + number=2, + oneof="operator", + message="AggregationQuery.Aggregation.Sum", + ) + avg: "AggregationQuery.Aggregation.Avg" = proto.Field( + proto.MESSAGE, + number=3, + oneof="operator", + message="AggregationQuery.Aggregation.Avg", + ) alias: str = proto.Field( proto.STRING, number=7, @@ -565,9 +649,9 @@ class Operator(proto.Enum): Requires: - - That ``value`` is a non-empty ``ArrayValue`` with at most - 10 values. - - No other ``IN`` or ``NOT_IN`` is in the same query. + - That ``value`` is a non-empty ``ArrayValue``, subject to + disjunction limits. + - No ``NOT_IN`` is in the same query. NOT_EQUAL (9): The given ``property`` is not equal to the given ``value``. @@ -583,7 +667,8 @@ class Operator(proto.Enum): Requires: - That ``value`` is an entity key. - - No other ``HAS_ANCESTOR`` is in the same query. + - All evaluated disjunctions must have the same + ``HAS_ANCESTOR`` filter. NOT_IN (13): The value of the ``property`` is not in the given array. @@ -591,8 +676,8 @@ class Operator(proto.Enum): - That ``value`` is a non-empty ``ArrayValue`` with at most 10 values. - - No other ``IN``, ``NOT_IN``, ``NOT_EQUAL`` is in the same - query. + - No other ``OR``, ``IN``, ``NOT_IN``, ``NOT_EQUAL`` is in + the same query. - That ``field`` comes first in the ``order_by``. 
""" OPERATOR_UNSPECIFIED = 0 From 174e059138e5ea61f2ca7c3885cb17b987038791 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Aug 2023 09:27:53 -0400 Subject: [PATCH 534/611] chore(main): release 2.17.0 (#468) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 13 +++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 19 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index f122d158c5e8..882f663e6b84 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.16.1" + ".": "2.17.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 907be34a4111..05e37195847e 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.17.0](https://github.com/googleapis/python-datastore/compare/v2.16.1...v2.17.0) (2023-08-09) + + +### Features + +* Publish proto definitions for SUM/AVG in Datastore ([cfcd0c2](https://github.com/googleapis/python-datastore/commit/cfcd0c2552c5dcf503cb3dee8c57b1e82be2d432)) + + +### Documentation + +* Minor formatting in Datastore Admin API ([cfcd0c2](https://github.com/googleapis/python-datastore/commit/cfcd0c2552c5dcf503cb3dee8c57b1e82be2d432)) +* Specify limit for `properties` in `Index` message in Datastore Admin API ([cfcd0c2](https://github.com/googleapis/python-datastore/commit/cfcd0c2552c5dcf503cb3dee8c57b1e82be2d432)) + ## [2.16.1](https://github.com/googleapis/python-datastore/compare/v2.16.0...v2.16.1) (2023-07-05) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index da0da6723fc2..6de7a69bf095 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 61e0c0a8360c..422b383cc30d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.16.1" +__version__ = "2.17.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 8edfaef7141c..8d4f4cfb61d6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 8a60bdde448c..7a84223d5368 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 8a60bdde448c..7a84223d5368 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} From 9e6b1f9eacd66383d3e7816142fc63f51b04b867 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Aug 2023 20:56:51 +0200 Subject: [PATCH 535/611] chore(deps): update dependency google-cloud-datastore to v2.17.0 (#469) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 9c65e4996e43..390385f6cb30 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.16.1 \ No newline at end of file +google-cloud-datastore==2.17.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index 4a4657a1ac18..f305ba58311c 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.16.1 +google-cloud-datastore==2.17.0 From 80c808339d0607eb0840183758276d4158505973 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 28 Aug 2023 09:13:58 -0400 Subject: [PATCH 536/611] docs: Update property requirement specifications (#470) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * docs: Update property requirement specifications PiperOrigin-RevId: 557861399 Source-Link: https://github.com/googleapis/googleapis/commit/3303b93dc165b6a5b8efdaf30db292e9ed1f28e5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/fb0d0a45385eb0be5842111af79af5d719d32168 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZmIwZDBhNDUzODVlYjBiZTU4NDIxMTFhZjc5YWY1ZDcxOWQzMjE2OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * apply docs fix from googleapis/gapic-generator-python#1721 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/datastore_v1/types/query.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 67a1c58db8c1..0ddd68111db4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -455,9 +455,13 @@ class PropertyReference(proto.Message): Attributes: name (str): - The name of the property. - If name includes "."s, it may be interpreted as - a property name path. + A reference to a property. + + Requires: + + - MUST be a dot-delimited (``.``) string of segments, where + each segment conforms to [entity property + name][google.datastore.v1.Entity.properties] limitations. """ name: str = proto.Field( From fa24d3dd428026a8677f343e920cfe12a8efce64 Mon Sep 17 00:00:00 2001 From: Mariatta Date: Tue, 5 Sep 2023 15:38:22 -0700 Subject: [PATCH 537/611] feat: Add support for Sum and Avg aggregation query (#437) --- .../google/cloud/datastore/aggregation.py | 89 ++++- .../tests/system/index.yaml | 2 +- .../tests/system/test_aggregation_query.py | 358 ++++++++++++++++-- .../tests/system/test_query.py | 15 + .../tests/unit/test_aggregation.py | 196 +++++++++- .../tests/unit/test_query.py | 50 +++ 6 files changed, 663 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py index 421ffc9392da..0518514ec894 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py @@ -39,6 +39,9 @@ class BaseAggregation(ABC): Base class representing an Aggregation operation in Datastore """ + def __init__(self, alias=None): + self.alias = alias + @abc.abstractmethod def _to_pb(self): """ @@ -59,7 +62,7 @@ class CountAggregation(BaseAggregation): """ def __init__(self, alias=None): - self.alias = alias + super(CountAggregation, self).__init__(alias=alias) def _to_pb(self): """ @@ -71,6 +74,60 @@ def _to_pb(self): return aggregation_pb +class SumAggregation(BaseAggregation): + """ + Representation of a "Sum" aggregation query. + + :type property_ref: str + :param property_ref: The property_ref for the aggregation. + + :type value: int + :param value: The resulting value from the aggregation. 
+ + """ + + def __init__(self, property_ref, alias=None): + self.property_ref = property_ref + super(SumAggregation, self).__init__(alias=alias) + + def _to_pb(self): + """ + Convert this instance to the protobuf representation + """ + aggregation_pb = query_pb2.AggregationQuery.Aggregation() + aggregation_pb.sum = query_pb2.AggregationQuery.Aggregation.Sum() + aggregation_pb.sum.property.name = self.property_ref + aggregation_pb.alias = self.alias + return aggregation_pb + + +class AvgAggregation(BaseAggregation): + """ + Representation of an "Avg" aggregation query. + + :type property_ref: str + :param property_ref: The property_ref for the aggregation. + + :type value: int + :param value: The resulting value from the aggregation. + + """ + + def __init__(self, property_ref, alias=None): + self.property_ref = property_ref + super(AvgAggregation, self).__init__(alias=alias) + + def _to_pb(self): + """ + Convert this instance to the protobuf representation + """ + aggregation_pb = query_pb2.AggregationQuery.Aggregation() + aggregation_pb.avg = query_pb2.AggregationQuery.Aggregation.Avg() + aggregation_pb.avg.property.name = self.property_ref + aggregation_pb.alias = self.alias + return aggregation_pb + + class AggregationResult(object): """ A class representing result from Aggregation Query @@ -154,6 +211,28 @@ def count(self, alias=None): self._aggregations.append(count_aggregation) return self + def sum(self, property_ref, alias=None): + """ + Adds a sum over the nested query + + :type property_ref: str + :param property_ref: The property_ref for the sum + """ + sum_aggregation = SumAggregation(property_ref=property_ref, alias=alias) + self._aggregations.append(sum_aggregation) + return self + + def avg(self, property_ref, alias=None): + """ + Adds an avg over the nested query + + :type property_ref: str + :param property_ref: The property_ref for the avg + """ + avg_aggregation = AvgAggregation(property_ref=property_ref, alias=alias) + self._aggregations.append(avg_aggregation) + return self + def add_aggregation(self, aggregation): """ Adds an aggregation operation to the nested query @@ -327,8 +406,7 @@ def _build_protobuf(self): """ pb = self._aggregation_query._to_pb() if self._limit is not None and self._limit > 0: - for aggregation in pb.aggregations: - aggregation.count.up_to = self._limit + pb.nested_query.limit = self._limit return pb def _process_query_results(self, response_pb): @@ -438,5 +516,8 @@ def _item_to_aggregation_result(iterator, pb): :rtype: :class:`google.cloud.datastore.aggregation.AggregationResult` :returns: The list of AggregationResults """ - results = [AggregationResult(alias=k, value=pb[k].integer_value) for k in pb.keys()] + results = [ + AggregationResult(alias=k, value=pb[k].integer_value or pb[k].double_value) + for k in pb.keys() + ] return results diff --git a/packages/google-cloud-datastore/tests/system/index.yaml b/packages/google-cloud-datastore/tests/system/index.yaml index f9cc2a5bc677..1f27c2464188 100644 --- a/packages/google-cloud-datastore/tests/system/index.yaml +++ b/packages/google-cloud-datastore/tests/system/index.yaml @@ -39,9 +39,9 @@ indexes: - name: family - name: appearances - - kind: Character ancestor: yes properties: - name: family - name: appearances + diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py index 51045003b632..ae9a8297438a 100644 --- a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py +++ 
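For orientation, the ``sum()`` and ``avg()`` builders added above compose with the existing ``count()`` API on ``AggregationQuery``, and ``fetch()`` yields batches of ``AggregationResult`` objects carrying an ``alias`` and a ``value``. A minimal usage sketch against this surface; the kind and property names are illustrative placeholders, not taken from this patch:

    from google.cloud import datastore

    client = datastore.Client()  # project/credentials resolved from the environment

    # Aggregations run over a nested query; "Character" and "appearances" are
    # hypothetical kind/property names used only for this sketch.
    query = client.query(kind="Character")
    agg_query = client.aggregation_query(query)
    agg_query.count(alias="total")                  # number of matching entities
    agg_query.sum("appearances", alias="sum_app")   # sum of a numeric property
    agg_query.avg("appearances", alias="avg_app")   # average, returned as a double

    for batch in agg_query.fetch():
        for result in batch:
            print(result.alias, result.value)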
diff --git a/packages/google-cloud-datastore/tests/system/index.yaml b/packages/google-cloud-datastore/tests/system/index.yaml
index f9cc2a5bc677..1f27c2464188 100644
--- a/packages/google-cloud-datastore/tests/system/index.yaml
+++ b/packages/google-cloud-datastore/tests/system/index.yaml
@@ -39,9 +39,9 @@ indexes:
   - name: family
   - name: appearances
-
 - kind: Character
   ancestor: yes
   properties:
   - name: family
   - name: appearances
+
diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py
index 51045003b632..ae9a8297438a 100644
--- a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py
+++ b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py
@@ -70,54 +70,281 @@ def nested_query(aggregation_query_client, ancestor_key):


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
-def test_aggregation_query_default(aggregation_query_client, nested_query, database_id):
+def test_count_query_default(aggregation_query_client, nested_query, database_id):
     query = nested_query

     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count()
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias == "property_1"
-        assert r.value == 8
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "property_1"
+    expected_count = len(populate_datastore.CHARACTERS)
+    assert r.value == expected_count


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
-def test_aggregation_query_with_alias(
-    aggregation_query_client, nested_query, database_id
+@pytest.mark.parametrize(
+    "aggregation_type,aggregation_args,expected",
+    [
+        ("count", (), len(populate_datastore.CHARACTERS)),
+        (
+            "sum",
+            ("appearances",),
+            sum(c["appearances"] for c in populate_datastore.CHARACTERS),
+        ),
+        (
+            "avg",
+            ("appearances",),
+            sum(c["appearances"] for c in populate_datastore.CHARACTERS)
+            / len(populate_datastore.CHARACTERS),
+        ),
+    ],
+)
+def test_aggregation_query_in_transaction(
+    aggregation_query_client,
+    nested_query,
+    database_id,
+    aggregation_type,
+    aggregation_args,
+    expected,
 ):
+    """
+    When an aggregation query is run in a transaction, the transaction id should be sent with the request.
+    The result is the same as when it is run outside of a transaction.
+    """
+    with aggregation_query_client.transaction():
+        query = nested_query
+
+        aggregation_query = aggregation_query_client.aggregation_query(query)
+        getattr(aggregation_query, aggregation_type)(*aggregation_args)
+        # run full query
+        result = _do_fetch(aggregation_query)
+        assert len(result) == 1
+        assert len(result[0]) == 1
+        r = result[0][0]
+        assert r.alias == "property_1"
+        assert r.value == expected
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_count_query_with_alias(aggregation_query_client, nested_query, database_id):
     query = nested_query

     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count(alias="total")
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias == "total"
-        assert r.value > 0
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "total"
+    expected_count = len(populate_datastore.CHARACTERS)
+    assert r.value == expected_count


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
-def test_aggregation_query_with_limit(
-    aggregation_query_client, nested_query, database_id
-):
+def test_sum_query_default(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.sum("appearances")
+    result = _do_fetch(aggregation_query)
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "property_1"
+    expected_sum = sum(c["appearances"] for c in populate_datastore.CHARACTERS)
+    assert r.value == expected_sum
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_sum_query_with_alias(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.sum("appearances", alias="sum_appearances")
+    result = _do_fetch(aggregation_query)
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "sum_appearances"
+    expected_sum = sum(c["appearances"] for c in populate_datastore.CHARACTERS)
+    assert r.value == expected_sum
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_avg_query_default(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.avg("appearances")
+    result = _do_fetch(aggregation_query)
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "property_1"
+    expected_avg = sum(c["appearances"] for c in populate_datastore.CHARACTERS) / len(
+        populate_datastore.CHARACTERS
+    )
+    assert r.value == expected_avg
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_avg_query_with_alias(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.avg("appearances", alias="avg_appearances")
+    result = _do_fetch(aggregation_query)
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "avg_appearances"
+    expected_avg = sum(c["appearances"] for c in populate_datastore.CHARACTERS) / len(
+        populate_datastore.CHARACTERS
+    )
+    assert r.value == expected_avg
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_count_query_with_limit(aggregation_query_client, nested_query, database_id):
     query = nested_query

     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count(alias="total")
     result = _do_fetch(aggregation_query)  # count without limit
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias == "total"
-        assert r.value == 8
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "total"
+    expected_count = len(populate_datastore.CHARACTERS)
+    assert r.value == expected_count

     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count(alias="total_up_to")
-    result = _do_fetch(aggregation_query, limit=2)  # count with limit = 2
+    limit = 2
+    result = _do_fetch(aggregation_query, limit=limit)  # count with limit = 2
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "total_up_to"
+    assert r.value == limit
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.count(alias="total_high_limit")
+    result = _do_fetch(
+        aggregation_query, limit=expected_count * 2
+    )  # count with limit > total
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "total_high_limit"
+    assert r.value == expected_count
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_sum_query_with_limit(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.sum("appearances", alias="sum_limited")
+    limit = 2
+    result = _do_fetch(aggregation_query, limit=limit)  # sum with limit = 2
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "sum_limited"
+    expected = sum(c["appearances"] for c in populate_datastore.CHARACTERS[:limit])
+    assert r.value == expected
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.sum("appearances", alias="sum_high_limit")
+    num_characters = len(populate_datastore.CHARACTERS)
+    result = _do_fetch(
+        aggregation_query, limit=num_characters * 2
+    )  # sum with limit > total
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "sum_high_limit"
+    assert r.value == sum(c["appearances"] for c in populate_datastore.CHARACTERS)
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_avg_query_with_limit(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.avg("appearances", alias="avg_limited")
+    limit = 2
+    result = _do_fetch(aggregation_query, limit=limit)  # avg with limit = 2
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "avg_limited"
+    expected = (
+        sum(c["appearances"] for c in populate_datastore.CHARACTERS[:limit]) / limit
+    )
+    assert r.value == expected
+
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.avg("appearances", alias="avg_high_limit")
+    num_characters = len(populate_datastore.CHARACTERS)
+    result = _do_fetch(
+        aggregation_query, limit=num_characters * 2
+    )  # avg with limit > total
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "avg_high_limit"
+    assert (
+        r.value
+        == sum(c["appearances"] for c in populate_datastore.CHARACTERS) / num_characters
+    )
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_count_query_empty(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+    query.add_filter("name", "=", "nonexistent")
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.count(alias="total")
+    result = _do_fetch(aggregation_query)
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "total"
+    assert r.value == 0
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_sum_query_empty(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+    query.add_filter("family", "=", "nonexistent")
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.sum("appearances", alias="sum")
+    result = _do_fetch(aggregation_query)
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias == "total_up_to"
-        assert r.value == 2
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "sum"
+    assert r.value == 0
+
+
+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_avg_query_empty(aggregation_query_client, nested_query, database_id):
+    query = nested_query
+    query.add_filter("family", "=", "nonexistent")
+    aggregation_query = aggregation_query_client.aggregation_query(query)
+    aggregation_query.avg("appearances", alias="avg")
+    result = _do_fetch(aggregation_query)
+    assert len(result) == 1
+    assert len(result[0]) == 1
+    r = result[0][0]
+    assert r.alias == "avg"
+    assert r.value == 0
@@ -129,11 +356,20 @@ def test_aggregation_query_multiple_aggregations(
     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count(alias="total")
     aggregation_query.count(alias="all")
+    aggregation_query.sum("appearances", alias="sum_appearances")
+    aggregation_query.avg("appearances", alias="avg_appearances")
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias in ["all", "total"]
-        assert r.value > 0
+    assert len(result[0]) == 4
+    result_dict = {r.alias: r for r in result[0]}
+    assert result_dict["total"].value == len(populate_datastore.CHARACTERS)
+    assert result_dict["all"].value == len(populate_datastore.CHARACTERS)
+    assert result_dict["sum_appearances"].value == sum(
+        c["appearances"] for c in populate_datastore.CHARACTERS
+    )
+    assert result_dict["avg_appearances"].value == sum(
+        c["appearances"] for c in populate_datastore.CHARACTERS
+    ) / len(populate_datastore.CHARACTERS)


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
@@ -141,36 +377,66 @@ def test_aggregation_query_add_aggregation(
     aggregation_query_client, nested_query, database_id
 ):
     from google.cloud.datastore.aggregation import CountAggregation
+    from google.cloud.datastore.aggregation import SumAggregation
+    from google.cloud.datastore.aggregation import AvgAggregation

     query = nested_query

     aggregation_query = aggregation_query_client.aggregation_query(query)
     count_aggregation = CountAggregation(alias="total")
     aggregation_query.add_aggregation(count_aggregation)
+
+    sum_aggregation = SumAggregation("appearances", alias="sum_appearances")
+    aggregation_query.add_aggregation(sum_aggregation)
+
+    avg_aggregation = AvgAggregation("appearances", alias="avg_appearances")
+    aggregation_query.add_aggregation(avg_aggregation)
+
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias == "total"
-        assert r.value > 0
+    assert len(result[0]) == 3
+    result_dict = {r.alias: r for r in result[0]}
+    assert result_dict["total"].value == len(populate_datastore.CHARACTERS)
+    assert result_dict["sum_appearances"].value == sum(
+        c["appearances"] for c in populate_datastore.CHARACTERS
+    )
+    assert result_dict["avg_appearances"].value == sum(
+        c["appearances"] for c in populate_datastore.CHARACTERS
+    ) / len(populate_datastore.CHARACTERS)


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
 def test_aggregation_query_add_aggregations(
     aggregation_query_client, nested_query, database_id
 ):
-    from google.cloud.datastore.aggregation import CountAggregation
+    from google.cloud.datastore.aggregation import (
+        CountAggregation,
+        SumAggregation,
+        AvgAggregation,
+    )

     query = nested_query

     aggregation_query = aggregation_query_client.aggregation_query(query)
     count_aggregation_1 = CountAggregation(alias="total")
     count_aggregation_2 = CountAggregation(alias="all")
-    aggregation_query.add_aggregations([count_aggregation_1, count_aggregation_2])
+    sum_aggregation = SumAggregation("appearances", alias="sum_appearances")
+    avg_aggregation = AvgAggregation("appearances", alias="avg_appearances")
+    aggregation_query.add_aggregations(
+        [count_aggregation_1, count_aggregation_2, sum_aggregation, avg_aggregation]
+    )
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-    for r in result[0]:
-        assert r.alias in ["total", "all"]
-        assert r.value > 0
+    assert len(result[0]) == 4
+    result_dict = {r.alias: r for r in result[0]}
+    assert result_dict["total"].value == len(populate_datastore.CHARACTERS)
+    assert result_dict["all"].value == len(populate_datastore.CHARACTERS)
+    assert result_dict["sum_appearances"].value == sum(
+        c["appearances"] for c in populate_datastore.CHARACTERS
+    )
+    assert result_dict["avg_appearances"].value == sum(
+        c["appearances"] for c in populate_datastore.CHARACTERS
+    ) / len(populate_datastore.CHARACTERS)


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
@@ -218,12 +484,20 @@ def test_aggregation_query_with_nested_query_filtered(

     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count(alias="total")
+    aggregation_query.sum("appearances", alias="sum_appearances")
+    aggregation_query.avg("appearances", alias="avg_appearances")
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-
-    for r in result[0]:
-        assert r.alias == "total"
-        assert r.value == 6
+    assert len(result[0]) == 3
+    result_dict = {r.alias: r for r in result[0]}
+    assert result_dict["total"].value == expected_matches
+    expected_sum = sum(
+        c["appearances"]
+        for c in populate_datastore.CHARACTERS
+        if c["appearances"] >= 20
+    )
+    assert result_dict["sum_appearances"].value == expected_sum
+    assert result_dict["avg_appearances"].value == expected_sum / expected_matches


 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
@@ -243,9 +517,17 @@ def test_aggregation_query_with_nested_query_multiple_filters(

     aggregation_query = aggregation_query_client.aggregation_query(query)
     aggregation_query.count(alias="total")
+    aggregation_query.sum("appearances", alias="sum_appearances")
+    aggregation_query.avg("appearances", alias="avg_appearances")
     result = _do_fetch(aggregation_query)
     assert len(result) == 1
-
-    for r in result[0]:
-        assert r.alias == "total"
-        assert r.value == 4
+    assert len(result[0]) == 3
+    result_dict = {r.alias: r for r in result[0]}
+    assert result_dict["total"].value == expected_matches
+    expected_sum = sum(
+        c["appearances"]
+        for c in populate_datastore.CHARACTERS
+        if c["appearances"] >= 26 and "Stark" in c["family"]
+    )
+    assert result_dict["sum_appearances"].value == expected_sum
+    assert result_dict["avg_appearances"].value == expected_sum / expected_matches
diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py
index 864bab570678..9d7bec06d856 100644
--- a/packages/google-cloud-datastore/tests/system/test_query.py
+++ b/packages/google-cloud-datastore/tests/system/test_query.py
@@ -82,6 +82,21 @@ def test_query_w_ancestor(ancestor_query, database_id):
     assert len(entities) == expected_matches


+@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
+def test_query_in_transaction(ancestor_query, database_id):
+    """
+    When a query is run in a transaction, the transaction id should be sent with the request.
+    The result is the same as when it is run outside of a transaction.
+    """
+    query = ancestor_query
+    client = query._client
+    expected_matches = 8
+    with client.transaction():
+        # run full query
+        entities = _do_fetch(query)
+        assert len(entities) == expected_matches
+
+
 @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True)
 def test_query_w_limit_paging(ancestor_query, database_id):
     query = ancestor_query
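
The new test_query_in_transaction exercises the behavior its docstring states: inside client.transaction(), fetches carry the transaction ID in the request's read options and return the same results as a non-transactional run, while eventually consistent reads are rejected (the unit tests later in this patch assert a ValueError for that case). A minimal sketch of both sides, using a hypothetical "Character" kind:

    from google.cloud import datastore

    client = datastore.Client()
    query = client.query(kind="Character")

    # Fetches inside the context manager are tied to the transaction;
    # the generated request's read_options.transaction is populated.
    with client.transaction():
        entities = list(query.fetch())

    # Eventual consistency cannot be combined with a transaction.
    with client.transaction():
        try:
            list(query.fetch(eventual=True))
        except ValueError:
            pass  # eventual=True inside a transaction is rejected
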
diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py
index fd72ad0a0eb5..15d11acaeb45 100644
--- a/packages/google-cloud-datastore/tests/unit/test_aggregation.py
+++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py
@@ -15,7 +15,12 @@
 import mock
 import pytest

-from google.cloud.datastore.aggregation import CountAggregation, AggregationQuery
+from google.cloud.datastore.aggregation import (
+    CountAggregation,
+    SumAggregation,
+    AvgAggregation,
+    AggregationQuery,
+)

 from google.cloud.datastore.helpers import set_database_id_to_request
 from tests.unit.test_query import _make_query, _make_client
@@ -34,6 +39,30 @@ def test_count_aggregation_to_pb():
     assert count_aggregation._to_pb() == expected_aggregation_query_pb


+def test_sum_aggregation_to_pb():
+    from google.cloud.datastore_v1.types import query as query_pb2
+
+    sum_aggregation = SumAggregation("appearances", alias="total")
+
+    expected_aggregation_query_pb = query_pb2.AggregationQuery.Aggregation()
+    expected_aggregation_query_pb.sum = query_pb2.AggregationQuery.Aggregation.Sum()
+    expected_aggregation_query_pb.sum.property.name = sum_aggregation.property_ref
+    expected_aggregation_query_pb.alias = sum_aggregation.alias
+    assert sum_aggregation._to_pb() == expected_aggregation_query_pb
+
+
+def test_avg_aggregation_to_pb():
+    from google.cloud.datastore_v1.types import query as query_pb2
+
+    avg_aggregation = AvgAggregation("appearances", alias="total")
+
+    expected_aggregation_query_pb = query_pb2.AggregationQuery.Aggregation()
+    expected_aggregation_query_pb.avg = query_pb2.AggregationQuery.Aggregation.Avg()
+    expected_aggregation_query_pb.avg.property.name = avg_aggregation.property_ref
+    expected_aggregation_query_pb.alias = avg_aggregation.alias
+    assert avg_aggregation._to_pb() == expected_aggregation_query_pb
+
+
 @pytest.fixture
 def database_id(request):
     return request.param
@@ -117,6 +146,8 @@ def test_pb_over_query_with_add_aggregations(client, database_id):
     aggregations = [
         CountAggregation(alias="total"),
         CountAggregation(alias="all"),
+        SumAggregation("appearances", alias="sum_appearances"),
+        AvgAggregation("appearances", alias="avg_appearances"),
     ]

     query = _make_query(client)
@@ -125,9 +156,73 @@
     aggregation_query.add_aggregations(aggregations)
     pb = aggregation_query._to_pb()
     assert pb.nested_query == _pb_from_query(query)
-    assert len(pb.aggregations) == 2
+    assert len(pb.aggregations) == 4
     assert pb.aggregations[0] == CountAggregation(alias="total")._to_pb()
     assert pb.aggregations[1] == CountAggregation(alias="all")._to_pb()
+    assert (
+        pb.aggregations[2]
+        == SumAggregation("appearances", alias="sum_appearances")._to_pb()
+    )
+    assert (
+        pb.aggregations[3]
+        == AvgAggregation("appearances", alias="avg_appearances")._to_pb()
+    )
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True)
+def test_pb_over_query_with_sum(client, database_id):
+    from google.cloud.datastore.query import _pb_from_query
+
+    query = _make_query(client)
+    aggregation_query = 
_make_aggregation_query(client=client, query=query) + + aggregation_query.sum("appearances", alias="total") + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 1 + assert pb.aggregations[0] == SumAggregation("appearances", alias="total")._to_pb() + + +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_pb_over_query_sum_with_add_aggregation(client, database_id): + from google.cloud.datastore.query import _pb_from_query + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + aggregation_query.add_aggregation(SumAggregation("appearances", alias="total")) + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 1 + assert pb.aggregations[0] == SumAggregation("appearances", alias="total")._to_pb() + + +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_pb_over_query_with_avg(client, database_id): + from google.cloud.datastore.query import _pb_from_query + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + aggregation_query.avg("appearances", alias="avg") + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 1 + assert pb.aggregations[0] == AvgAggregation("appearances", alias="avg")._to_pb() + + +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_pb_over_query_avg_with_add_aggregation(client, database_id): + from google.cloud.datastore.query import _pb_from_query + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + aggregation_query.add_aggregation(AvgAggregation("appearances", alias="avg")) + pb = aggregation_query._to_pb() + assert pb.nested_query == _pb_from_query(query) + assert len(pb.aggregations) == 1 + assert pb.aggregations[0] == AvgAggregation("appearances", alias="avg")._to_pb() @pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) @@ -243,8 +338,11 @@ def test_iterator__build_protobuf_all_values(): query = _make_query(client) alias = "total" limit = 2 + property_ref = "appearances" aggregation_query = AggregationQuery(client=client, query=query) aggregation_query.count(alias) + aggregation_query.sum(property_ref) + aggregation_query.avg(property_ref) iterator = _make_aggregation_iterator(aggregation_query, client, limit=limit) iterator.num_results = 4 @@ -252,9 +350,22 @@ def test_iterator__build_protobuf_all_values(): pb = iterator._build_protobuf() expected_pb = query_pb2.AggregationQuery() expected_pb.nested_query = query_pb2.Query() + expected_pb.nested_query.limit = limit + expected_count_pb = query_pb2.AggregationQuery.Aggregation(alias=alias) - expected_count_pb.count.up_to = limit + expected_count_pb.count = query_pb2.AggregationQuery.Aggregation.Count() expected_pb.aggregations.append(expected_count_pb) + + expected_sum_pb = query_pb2.AggregationQuery.Aggregation() + expected_sum_pb.sum = query_pb2.AggregationQuery.Aggregation.Sum() + expected_sum_pb.sum.property.name = property_ref + expected_pb.aggregations.append(expected_sum_pb) + + expected_avg_pb = query_pb2.AggregationQuery.Aggregation() + expected_avg_pb.avg = query_pb2.AggregationQuery.Aggregation.Avg() + expected_avg_pb.avg.property.name = property_ref + expected_pb.aggregations.append(expected_avg_pb) + assert pb == expected_pb @@ -426,6 +537,81 @@ def 
test__item_to_aggregation_result(): assert result[0].value == map_composite_mock.__getitem__().integer_value +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +@pytest.mark.parametrize( + "aggregation_type,aggregation_args", + [ + ("count", ()), + ( + "sum", + ("appearances",), + ), + ("avg", ("appearances",)), + ], +) +def test_eventual_transaction_fails(database_id, aggregation_type, aggregation_args): + """ + Queries with eventual consistency cannot be used in a transaction. + """ + import mock + + transaction = mock.Mock() + transaction.id = b"expected_id" + client = _Client(None, database=database_id, transaction=transaction) + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + # initiate requested aggregation (ex count, sum, avg) + getattr(aggregation_query, aggregation_type)(*aggregation_args) + with pytest.raises(ValueError): + list(aggregation_query.fetch(eventual=True)) + + +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +@pytest.mark.parametrize( + "aggregation_type,aggregation_args", + [ + ("count", ()), + ( + "sum", + ("appearances",), + ), + ("avg", ("appearances",)), + ], +) +def test_transaction_id_populated(database_id, aggregation_type, aggregation_args): + """ + When an aggregation is run in the context of a transaction, the transaction + ID should be populated in the request. + """ + import mock + + transaction = mock.Mock() + transaction.id = b"expected_id" + mock_datastore_api = mock.Mock() + mock_gapic = mock_datastore_api.run_aggregation_query + mock_gapic.return_value = _make_aggregation_query_response([]) + client = _Client( + None, + datastore_api=mock_datastore_api, + database=database_id, + transaction=transaction, + ) + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + # initiate requested aggregation (ex count, sum, avg) + getattr(aggregation_query, aggregation_type)(*aggregation_args) + # run mock query + list(aggregation_query.fetch()) + assert mock_gapic.call_count == 1 + request = mock_gapic.call_args[1]["request"] + read_options = request["read_options"] + # ensure transaction ID is populated + assert read_options.transaction == client.current_transaction.id + + class _Client(object): def __init__( self, @@ -459,7 +645,9 @@ def _make_aggregation_iterator(*args, **kw): return AggregationResultIterator(*args, **kw) -def _make_aggregation_query_response(aggregation_pbs, more_results_enum): +def _make_aggregation_query_response( + aggregation_pbs, more_results_enum=3 +): # 3 = NO_MORE_RESULTS from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import aggregation_result diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 25b3febb635c..7758d7fb8f28 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -652,6 +652,56 @@ def test_query_fetch_w_explicit_client_w_retry_w_timeout(database_id): assert iterator._timeout == timeout +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_eventual_transaction_fails(database_id): + """ + Queries with eventual consistency cannot be used in a transaction. 
+    """
+    import mock
+
+    transaction = mock.Mock()
+    transaction.id = b"expected_id"
+    client = _Client(None, database=database_id, transaction=transaction)
+
+    query = _make_query(client)
+    with pytest.raises(ValueError):
+        list(query.fetch(eventual=True))
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_transaction_id_populated(database_id):
+    """
+    When a query is run in the context of a transaction, the transaction
+    ID should be populated in the request.
+    """
+    import mock
+
+    transaction = mock.Mock()
+    transaction.id = b"expected_id"
+    mock_datastore_api = mock.Mock()
+    mock_gapic = mock_datastore_api.run_query
+
+    more_results_enum = 3  # NO_MORE_RESULTS
+    response_pb = _make_query_response([], b"", more_results_enum, 0)
+    mock_gapic.return_value = response_pb
+
+    client = _Client(
+        None,
+        datastore_api=mock_datastore_api,
+        database=database_id,
+        transaction=transaction,
+    )
+
+    query = _make_query(client)
+    # run mock query
+    list(query.fetch())
+    assert mock_gapic.call_count == 1
+    request = mock_gapic.call_args[1]["request"]
+    read_options = request["read_options"]
+    # ensure transaction ID is populated
+    assert read_options.transaction == client.current_transaction.id
+
+
 def test_iterator_constructor_defaults():
     query = object()
     client = object()
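
The transaction-ID unit tests above inspect the request handed to the GAPIC layer. For reference, a hedged sketch of the request shape those assertions examine, built directly from the public protobuf types; the project ID is a placeholder:

    from google.cloud.datastore_v1.types import datastore as datastore_pb2

    # ReadOptions carries either a consistency mode or a transaction id;
    # inside a transaction the client sets the latter.
    read_options = datastore_pb2.ReadOptions(transaction=b"expected_id")
    request = datastore_pb2.RunQueryRequest(
        project_id="my-project",
        read_options=read_options,
    )
    assert request.read_options.transaction == b"expected_id"
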
From 270322845d9fc4ca76677493366f8a6742e8bb2a Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Wed, 6 Sep 2023 00:43:22 +0200
Subject: [PATCH 538/611] chore(deps): update all dependencies (#473)

---
 packages/google-cloud-datastore/.github/workflows/mypy.yml | 2 +-
 .../samples/snippets/requirements-test.txt                 | 2 +-
 .../samples/snippets/schedule-export/requirements-test.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml
index c63242630acd..1445918662a8 100644
--- a/packages/google-cloud-datastore/.github/workflows/mypy.yml
+++ b/packages/google-cloud-datastore/.github/workflows/mypy.yml
@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - name: Checkout
-      uses: actions/checkout@v3
+      uses: actions/checkout@v4
     - name: Setup Python
       uses: actions/setup-python@v4
      with:
diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt
index f8841e8c6af2..063f91e2ea3f 100644
--- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt
+++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt
@@ -1,4 +1,4 @@
 backoff===1.11.1; python_version < "3.7"
 backoff==2.2.1; python_version >= "3.7"
-pytest==7.4.0
+pytest==7.4.1
 flaky==3.7.0
diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt
index 6950eb5a7b6a..bd5fef9ba0a5 100644
--- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt
+++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt
@@ -1 +1 @@
-pytest==7.4.0
\ No newline at end of file
+pytest==7.4.1
\ No newline at end of file

From 195d6e42e23d9106cdbb886380f9362ba4cd2f1c Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Wed, 6 Sep 2023 00:07:06 +0000
Subject: [PATCH 539/611] chore(main): release 2.18.0 (#471)

---
 .../.release-please-manifest.json                    |  2 +-
 packages/google-cloud-datastore/CHANGELOG.md         | 12 ++++++++++++
 .../google/cloud/datastore/gapic_version.py          |  2 +-
 .../google/cloud/datastore/version.py                |  2 +-
 .../google/cloud/datastore_admin/gapic_version.py    |  2 +-
 .../google/cloud/datastore_admin_v1/gapic_version.py |  2 +-
 .../google/cloud/datastore_v1/gapic_version.py       |  2 +-
 7 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json
index 882f663e6b84..a627e662e002 100644
--- a/packages/google-cloud-datastore/.release-please-manifest.json
+++ b/packages/google-cloud-datastore/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "2.17.0"
+  ".": "2.18.0"
 }
\ No newline at end of file
diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md
index 05e37195847e..499af3b62e77 100644
--- a/packages/google-cloud-datastore/CHANGELOG.md
+++ b/packages/google-cloud-datastore/CHANGELOG.md
@@ -4,6 +4,18 @@

 [1]: https://pypi.org/project/google-cloud-datastore/#history

+## [2.18.0](https://github.com/googleapis/python-datastore/compare/v2.17.0...v2.18.0) (2023-09-05)
+
+
+### Features
+
+* Add support for Sum and Avg aggregation query ([#437](https://github.com/googleapis/python-datastore/issues/437)) ([e99120d](https://github.com/googleapis/python-datastore/commit/e99120d0dde446d89674addab49987cb24279fe3))
+
+
+### Documentation
+
+* Update property requirement specifications ([#470](https://github.com/googleapis/python-datastore/issues/470)) ([795ce81](https://github.com/googleapis/python-datastore/commit/795ce81c61d2af4e1ba285b4f0f8d796ea435bc3))
+
 ## [2.17.0](https://github.com/googleapis/python-datastore/compare/v2.16.1...v2.17.0) (2023-08-09)

diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py
index 6de7a69bf095..38172a8ea25b 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "2.17.0"  # {x-release-please-version}
+__version__ = "2.18.0"  # {x-release-please-version}
diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py
index 422b383cc30d..a613e5ea2f7e 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore/version.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "2.17.0"
+__version__ = "2.18.0"
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py
index 8d4f4cfb61d6..f09943f6bdf7 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
# -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 7a84223d5368..552e8442b58e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 7a84223d5368..552e8442b58e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} From af4848a09c4a94eb503ac42d5d63da6a2b79b771 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Sep 2023 16:21:45 -0700 Subject: [PATCH 540/611] docs: Minor formatting (#476) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Minor formatting chore: Update gapic-generator-python to v1.11.5 build: Update rules_python to 0.24.0 PiperOrigin-RevId: 563436317 Source-Link: https://github.com/googleapis/googleapis/commit/42fd37b18d706f6f51f52f209973b3b2c28f509a Source-Link: https://github.com/googleapis/googleapis-gen/commit/280264ca02fb9316b4237a96d0af1a2343a81a56 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjgwMjY0Y2EwMmZiOTMxNmI0MjM3YTk2ZDBhZjFhMjM0M2E4MWE1NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 5 ++++- .../datastore_admin_v1/services/datastore_admin/client.py | 5 ++++- .../services/datastore_admin/transports/base.py | 1 - .../services/datastore_admin/transports/grpc.py | 4 +++- .../services/datastore_admin/transports/grpc_asyncio.py | 4 +++- .../services/datastore_admin/transports/rest.py | 4 +++- .../google/cloud/datastore_admin_v1/types/datastore_admin.py | 1 + .../cloud/datastore_v1/services/datastore/async_client.py | 2 +- .../google/cloud/datastore_v1/services/datastore/client.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/base.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/grpc.py | 2 +- .../services/datastore/transports/grpc_asyncio.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/rest.py | 2 +- .../google/cloud/datastore_v1/types/aggregation_result.py | 1 + .../google/cloud/datastore_v1/types/datastore.py | 2 ++ .../google/cloud/datastore_v1/types/entity.py | 4 ++++ .../unit/gapic/datastore_admin_v1/test_datastore_admin.py | 2 +- .../tests/unit/gapic/datastore_v1/test_datastore.py | 2 +- 18 files changed, 33 insertions(+), 14 deletions(-) diff --git 
a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index fb112508a9e4..f0178210b18b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -47,7 +47,7 @@ from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport @@ -56,13 +56,16 @@ class DatastoreAdminAsyncClient: """Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for Cloud Datastore. + Concepts: Project, namespace, kind, and entity as defined in the Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. + EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index fabdcecf8593..cadac67edfe1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -51,7 +51,7 @@ from google.cloud.datastore_admin_v1.services.datastore_admin import pagers from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreAdminGrpcTransport @@ -98,13 +98,16 @@ def get_transport_class( class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta): """Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for Cloud Datastore. + Concepts: Project, namespace, kind, and entity as defined in the Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. + EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 83b6167e56f4..d37482a19e6e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -29,7 +29,6 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 24ca17c761cb..f5fd0e906280 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -27,7 +27,6 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO @@ -36,13 +35,16 @@ class DatastoreAdminGrpcTransport(DatastoreAdminTransport): """gRPC backend transport for DatastoreAdmin. Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for Cloud Datastore. + Concepts: Project, namespace, kind, and entity as defined in the Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. + EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 9c37e0745e7a..3c6aa35167f8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -27,7 +27,6 @@ from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreAdminGrpcTransport @@ -37,13 +36,16 @@ class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): """gRPC AsyncIO backend transport for DatastoreAdmin. Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for Cloud Datastore. + Concepts: Project, namespace, kind, and entity as defined in the Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. 
+ EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index ee7f7dc0fed6..f5feaa8895d0 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -28,7 +28,6 @@ from google.protobuf import json_format from google.api_core import operations_v1 -from google.longrunning import operations_pb2 from requests import __version__ as requests_version import dataclasses import re @@ -363,13 +362,16 @@ class DatastoreAdminRestTransport(DatastoreAdminTransport): """REST backend transport for DatastoreAdmin. Google Cloud Datastore Admin API + The Datastore Admin API provides several admin services for Cloud Datastore. + Concepts: Project, namespace, kind, and entity as defined in the Google Cloud Datastore API. Operation: An Operation represents work being performed in the background. + EntityFilter: Allows specifying a subset of entities in a project. This is specified as a combination of kinds and namespaces (either or both of which may be all). diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 103d2563c3e6..447827271d96 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -429,6 +429,7 @@ class EntityFilter(proto.Message): An empty list represents all namespaces. This is the preferred usage for projects that don't use namespaces. + An empty string element represents the default namespace. 
This should be used if the project has data in non-default namespaces, but doesn't diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 1fcbf88f1bc8..b5b700efa84b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -46,7 +46,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 2bb647a69f0c..58c8ad221d85 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -50,7 +50,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatastoreGrpcTransport diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 54cbd8c473b9..e97e6b6b8529 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -27,7 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.datastore_v1.types import datastore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 27e578081af4..3e31e98e69d1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -25,7 +25,7 @@ import grpc # type: ignore from google.cloud.datastore_v1.types import datastore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py 
b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 4baefa382ff0..2a3a66b0fe95 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -25,7 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.datastore_v1.types import datastore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreGrpcTransport diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index a5086d560016..4f25fef29c70 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -27,7 +27,6 @@ from google.api_core import gapic_v1 from google.protobuf import json_format -from google.longrunning import operations_pb2 from requests import __version__ as requests_version import dataclasses import re @@ -41,6 +40,7 @@ from google.cloud.datastore_v1.types import datastore +from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index 8bcb83711af1..dd53cfa3d4de 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -74,6 +74,7 @@ class AggregationResultBatch(proto.Message): ``NO_MORE_RESULTS``. read_time (google.protobuf.timestamp_pb2.Timestamp): Read timestamp this batch was returned from. + In a single transaction, subsequent query result batches for the same query can have a greater timestamp. Each batch's read timestamp is valid diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index be824c9602cb..6c768904ef28 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -861,6 +861,7 @@ class ReadOptions(proto.Message): Reads entities as they were at the given time. This value is only supported for Cloud Firestore in Datastore mode. + This must be a microsecond precision timestamp within the past one hour, or if Point-in-Time Recovery is enabled, can additionally be a whole @@ -957,6 +958,7 @@ class ReadOnly(proto.Message): Attributes: read_time (google.protobuf.timestamp_pb2.Timestamp): Reads entities at the given time. 
+ This must be a microsecond precision timestamp within the past one hour, or if Point-in-Time Recovery is enabled, can additionally be a whole diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index dc3374e418b6..09c0ecc809c3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -120,6 +120,7 @@ class Key(proto.Message): class PathElement(proto.Message): r"""A (kind, ID/name) pair used to construct a key path. + If either name or ID is set, the element is complete. If neither is set, the element is incomplete. @@ -143,6 +144,7 @@ class PathElement(proto.Message): base-64 encoding of the bytes. id (int): The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are discouraged and may not be supported in the future. @@ -264,6 +266,7 @@ class Value(proto.Message): This field is a member of `oneof`_ ``value_type``. entity_value (google.cloud.datastore_v1.types.Entity): An entity value. + - May have no key. - May have a key with an incomplete key path. - May have a reserved/read-only key. @@ -356,6 +359,7 @@ class Value(proto.Message): class Entity(proto.Message): r"""A Datastore data object. + Must not exceed 1 MiB - 4 bytes. Attributes: diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 557238a07e57..167aee46f769 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -58,7 +58,7 @@ from google.cloud.datastore_admin_v1.services.datastore_admin import transports from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore import google.auth diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 9d99308cfa81..8f39cff05c87 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -51,7 +51,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore From 3f8055a52bc57c5a2ed08ae16b95c8ae40d0702c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Sep 2023 13:22:58 -0700 Subject: [PATCH 541/611] chore: fix test dependencies (#482) --- packages/google-cloud-datastore/noxfile.py | 8 ++++++-- packages/google-cloud-datastore/owlbot.py | 3 +++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py 
b/packages/google-cloud-datastore/noxfile.py index 99df26d9ac61..9df1c97af96b 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -40,7 +40,9 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [ + "six", +] UNIT_TEST_LOCAL_DEPENDENCIES = [] UNIT_TEST_DEPENDENCIES = [] UNIT_TEST_EXTRAS = [] @@ -52,7 +54,9 @@ "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ + "six", +] SYSTEM_TEST_LOCAL_DEPENDENCIES = [] SYSTEM_TEST_DEPENDENCIES = [] SYSTEM_TEST_EXTRAS = [] diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index d50402316b9a..44d01b44c7fe 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -99,6 +99,9 @@ def get_staging_dirs( templated_files = common.py_library( microgenerator=True, split_system_tests=True, + # six required by (but not installed by) google-cloud-core < v2.0.0 + unit_test_external_dependencies=["six"], + system_test_external_dependencies=["six"], cov_level=100, ) s.move( From 5dd7b00e0fd8583e456c2761c5b0472cb89fe8a6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Sep 2023 15:40:45 -0700 Subject: [PATCH 542/611] chore: add sync-repo-settings.yaml (#472) --- .../.github/sync-repo-settings.yaml | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 packages/google-cloud-datastore/.github/sync-repo-settings.yaml diff --git a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml new file mode 100644 index 000000000000..dc8cd53ebddb --- /dev/null +++ b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml @@ -0,0 +1,46 @@ +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `main` +- pattern: main + # Can admins overwrite branch protection. + # Defaults to `true` + isAdminEnforced: true + # Number of approving reviews required to update matching branches. + # Defaults to `1` + requiredApprovingReviewCount: 1 + # Are reviews from code owners required to update matching branches. + # Defaults to `false` + requiresCodeOwnerReviews: true + # Require up to date branches + requiresStrictStatusChecks: true + # List of required status check contexts that must pass for commits to be accepted to matching branches. 
+ requiredStatusCheckContexts: + - 'Kokoro' + - 'Kokoro system-3.8' + - 'cla/google' + - 'OwlBot Post Processor' + - 'docs' + - 'docfx' + - 'lint' + - 'unit (3.7)' + - 'unit (3.8)' + - 'unit (3.9)' + - 'unit (3.10)' + - 'unit (3.11)' + - 'cover' + - 'mypy' +# List of explicit permissions to add (additive only) +permissionRules: + # Team slug to add to repository permissions + - team: yoshi-admins + # Access level required, one of push|pull|admin|maintain|triage + permission: admin + # Team slug to add to repository permissions + - team: yoshi-python-admins + # Access level required, one of push|pull|admin|maintain|triage + permission: admin + # Team slug to add to repository permissions + - team: yoshi-python + # Access level required, one of push|pull|admin|maintain|triage + permission: push From c48782bda604795ed11ebca365bdffeba9d15a67 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 16 Sep 2023 01:43:08 +0200 Subject: [PATCH 543/611] chore(deps): update all dependencies (#475) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 390385f6cb30..d4e90e37456f 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.17.0 \ No newline at end of file +google-cloud-datastore==2.18.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index f305ba58311c..a84b83a1fe64 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.17.0 +google-cloud-datastore==2.18.0 From 73421680939968dfe40c87b33277d2514b836d3a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 18 Sep 2023 19:29:06 +0200 Subject: [PATCH 544/611] chore(deps): update all dependencies (#483) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 063f91e2ea3f..9b6a1aeda140 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.4.1 +pytest==7.4.2 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index bd5fef9ba0a5..de1887becf2e 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.1 \ No newline at end of file +pytest==7.4.2 \ No newline at end of file From 5738d8e3a50822c47f1acd8c5114043713e0dde2 Mon Sep 17 00:00:00 
2001 From: Juan Lara Date: Tue, 3 Oct 2023 18:48:06 +0000 Subject: [PATCH 545/611] samples: Add snippets for sum and avg (#480) --- .../samples/snippets/snippets.py | 159 ++++++++++++++++++ .../samples/snippets/snippets_test.py | 95 +++++++++++ 2 files changed, 254 insertions(+) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets.py b/packages/google-cloud-datastore/samples/snippets/snippets.py index 78e72bca66d3..749a1ce4d74d 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets.py @@ -230,6 +230,165 @@ def count_query_with_stale_read(client): return [task1, task2, task3] +def sum_query_on_kind(client): + # [START datastore_sum_aggregation_query_on_kind] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute sum aggregation query + all_tasks_query = client.query(kind="Task") + all_tasks_sum_query = client.aggregation_query(all_tasks_query).sum("hours") + query_result = all_tasks_sum_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"Total sum of hours in tasks is {aggregation.value}") + # [END datastore_sum_aggregation_query_on_kind] + return tasks + + +def sum_query_property_filter(client): + # [START datastore_sum_aggregation_query_with_filters] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + task1["done"] = True + task2["done"] = True + task3["done"] = False + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute sum aggregation query with filters + completed_tasks = client.query(kind="Task").add_filter("done", "=", True) + completed_tasks_query = client.aggregation_query(query=completed_tasks).sum( + property_ref="hours", + alias="total_completed_sum_hours" + ) + + completed_query_result = completed_tasks_query.fetch() + for aggregation_results in completed_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_completed_sum_hours": + print(f"Total sum of hours in completed tasks is {aggregation_result.value}") + # [END datastore_sum_aggregation_query_with_filters] + return tasks + + +def avg_query_on_kind(client): + # [START datastore_avg_aggregation_query_on_kind] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute average aggregation query + all_tasks_query = client.query(kind="Task") + all_tasks_avg_query = client.aggregation_query(all_tasks_query).avg("hours") + query_result = all_tasks_avg_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"Total average of hours in tasks is {aggregation.value}") + # [END datastore_avg_aggregation_query_on_kind] + return tasks + + +def 
avg_query_property_filter(client): + # [START datastore_avg_aggregation_query_with_filters] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + task1["done"] = True + task2["done"] = True + task3["done"] = False + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute average aggregation query with filters + completed_tasks = client.query(kind="Task").add_filter("done", "=", True) + completed_tasks_query = client.aggregation_query(query=completed_tasks).avg( + property_ref="hours", + alias="total_completed_avg_hours" + ) + + completed_query_result = completed_tasks_query.fetch() + for aggregation_results in completed_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_completed_avg_hours": + print(f"Total average of hours in completed tasks is {aggregation_result.value}") + # [END datastore_avg_aggregation_query_with_filters] + return tasks + + +def multiple_aggregations_query(client): + # [START datastore_multiple_aggregation_in_structured_query] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute query with multiple aggregations + all_tasks_query = client.query(kind="Task") + aggregation_query = client.aggregation_query(all_tasks_query) + # Add aggregations + aggregation_query.add_aggregations( + [ + datastore.aggregation.CountAggregation(alias="count_aggregation"), + datastore.aggregation.SumAggregation( + property_ref="hours", alias="sum_aggregation"), + datastore.aggregation.AvgAggregation( + property_ref="hours", alias="avg_aggregation") + ] + ) + + query_result = aggregation_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"{aggregation.alias} value is {aggregation.value}") + # [END datastore_multiple_aggregation_in_structured_query] + return tasks + + def main(project_id): client = datastore.Client(project_id) diff --git a/packages/google-cloud-datastore/samples/snippets/snippets_test.py b/packages/google-cloud-datastore/samples/snippets/snippets_test.py index 18bc701ec288..92db05075d9c 100644 --- a/packages/google-cloud-datastore/samples/snippets/snippets_test.py +++ b/packages/google-cloud-datastore/samples/snippets/snippets_test.py @@ -14,7 +14,9 @@ import os import backoff +import google.api_core.exceptions from google.cloud import datastore +from google.cloud import datastore_admin_v1 import pytest import snippets @@ -43,6 +45,38 @@ def client(): client.cleanup() +@pytest.fixture(scope="session", autouse=True) +def setup_indexes(request): + # Set up required indexes + admin_client = datastore_admin_v1.DatastoreAdminClient() + + indexes = [] + done_property_index = datastore_admin_v1.Index.IndexedProperty( + name='done', + direction=datastore_admin_v1.Index.Direction.ASCENDING + ) + hour_property_index = datastore_admin_v1.Index.IndexedProperty( + name='hours', + direction=datastore_admin_v1.Index.Direction.ASCENDING + ) + done_hour_index = datastore_admin_v1.Index( + kind='Task', + ancestor=datastore_admin_v1.Index.AncestorMode.NONE, + 
properties=[done_property_index, hour_property_index] + ) + indexes.append(done_hour_index) + + for index in indexes: + request = datastore_admin_v1.CreateIndexRequest(project_id=PROJECT, index=index) + # Create the required index + # Dependent tests will fail until the index is ready + try: + admin_client.create_index(request) + # Pass if the index already exists + except (google.api_core.exceptions.AlreadyExists): + pass + + @pytest.mark.flaky class TestDatastoreSnippets: # These tests mostly just test the absence of exceptions. @@ -118,3 +152,64 @@ def test_count_query_with_stale_read(self, capsys, client): assert captured.err == "" client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_sum_query_on_kind(self, capsys, client): + tasks = snippets.sum_query_on_kind(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total sum of hours in tasks is 9" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_sum_query_property_filter(self, capsys, client): + tasks = snippets.sum_query_property_filter(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total sum of hours in completed tasks is 8" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_avg_query_on_kind(self, capsys, client): + tasks = snippets.avg_query_on_kind(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total average of hours in tasks is 3.0" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_avg_query_property_filter(self, capsys, client): + tasks = snippets.avg_query_property_filter(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total average of hours in completed tasks is 4.0" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_multiple_aggregations_query(self, capsys, client): + tasks = snippets.multiple_aggregations_query(client) + captured = capsys.readouterr() + assert ( + 'avg_aggregation value is 3.0' in captured.out + ) + assert ( + 'count_aggregation value is 3' in captured.out + ) + assert ( + 'sum_aggregation value is 9' in captured.out + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) From 2e019ce9b17e906fdea5bcab06b76be4b6d7f93b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:52:40 -0400 Subject: [PATCH 546/611] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#486) Source-Link: https://github.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-datastore/.gitignore | 1 + .../.kokoro/requirements.txt | 49 ++++++++++--------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index a3da1b0d4cd3..a9bdb1b7ac0f 100644 ---
a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/packages/google-cloud-datastore/.gitignore b/packages/google-cloud-datastore/.gitignore index b4243ced74e4..d083ea1ddc3e 100644 --- a/packages/google-cloud-datastore/.gitignore +++ b/packages/google-cloud-datastore/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 029bd342de94..96d593c8c82a 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + 
--hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 97d4af8595830b51ce971618fbb0c57cd58069ed Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 10 Oct 2023 10:15:28 -0400 Subject: [PATCH 547/611] chore: [autoapprove] Update `black` and `isort` to latest versions (#489) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- .../.kokoro/requirements.txt | 6 ++-- .../.pre-commit-config.yaml | 2 +- .../cloud/datastore/_app_engine_key_pb2.py | 1 - .../google/cloud/datastore/aggregation.py | 1 - .../google/cloud/datastore/helpers.py | 1 - .../google/cloud/datastore/query.py | 1 - .../datastore_admin/transports/rest.py | 4 --- .../services/datastore/transports/rest.py | 4 --- packages/google-cloud-datastore/noxfile.py | 34 ++++++++++--------- .../tests/system/test_aggregation_query.py | 1 - .../tests/system/test_query.py | 3 -- .../tests/system/utils/clear_datastore.py | 1 - .../tests/unit/test_client.py | 3 -- .../tests/unit/test_entity.py | 1 - .../tests/unit/test_query.py | 5 --- 16 files changed, 24 insertions(+), 48 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index a9bdb1b7ac0f..dd98abbdeebe 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 96d593c8c82a..0332d3267e15 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml index 19409cbd37a4..6a8e16950664 100644 --- a/packages/google-cloud-datastore/.pre-commit-config.yaml +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py index 11a1df51cf2b..88c2cb1ebf37 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_app_engine_key_pb2.py @@ -69,7 +69,6 @@ _sym_db.RegisterMessage(Path.Element) if _descriptor._USE_C_DESCRIPTORS == False: # pragma: NO COVER - DESCRIPTOR._options = None _REFERENCE._serialized_start = 48 _REFERENCE._serialized_end = 134 diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py index 0518514ec894..47ebfebd09e3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py @@ -166,7 +166,6 @@ def __init__( client, query, ): - self._client = client self._nested_query = query self._aggregations = [] diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 2deecabedd35..e889488337b1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -50,7 +50,6 @@ def _get_meaning(value_pb, is_list=False): list meanings agree, it just condenses them. 
""" if is_list: - values = value_pb.array_value.values # An empty list will have no values, hence no shared meaning diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 289605bb7821..57c0702c2627 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -204,7 +204,6 @@ def __init__( order=(), distinct_on=(), ): - self._client = client self._kind = kind diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index f5feaa8895d0..b29a8b7591bb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -1125,7 +1125,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1188,7 +1187,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1251,7 +1249,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1318,7 +1315,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index 4f25fef29c70..aa5b0d1e3585 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -1366,7 +1366,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1429,7 +1428,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1492,7 +1490,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1559,7 +1556,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 9df1c97af96b..5f35fbd9c6d7 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -17,22 +17,24 @@ # Generated by synthtool. DO NOT EDIT! 
from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -40,27 +42,27 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "six", ] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "six", ] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -75,6 +77,7 @@ "blacken", "docs", "doctests", + "format", ] # Error if a python version is missing @@ -203,7 +206,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. # See https://github.com/grpc/grpc/issues/32163 diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py index ae9a8297438a..ae175d808b02 100644 --- a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py +++ b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py @@ -46,7 +46,6 @@ def aggregation_query_client(datastore_client, database_id=None): @pytest.fixture(scope="session") def ancestor_key(aggregation_query_client, in_emulator): - # In the emulator, re-populating the datastore is cheap. if in_emulator: populate_datastore.add_characters(client=aggregation_query_client) diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index 9d7bec06d856..9f902205e9db 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -48,7 +48,6 @@ def query_client(datastore_client): @pytest.fixture(scope="session") def ancestor_key(query_client, in_emulator): - # In the emulator, re-populating the datastore is cheap. 
if in_emulator: populate_datastore.add_characters(client=query_client) @@ -205,7 +204,6 @@ def test_query_w_projection(ancestor_query, database_id): @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_query_w_paginate_simple_uuid_keys(query_client, database_id): - # See issue #4264 page_query = query_client.query(kind="uuid_key") iterator = page_query.fetch() @@ -224,7 +222,6 @@ def test_query_w_paginate_simple_uuid_keys(query_client, database_id): @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_query_paginate_simple_timestamp_keys(query_client, database_id): - # See issue #4264 page_query = query_client.query(kind="timestamp_key") iterator = page_query.fetch() diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index cd552c26a459..2082bce76363 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -105,7 +105,6 @@ def run(database): response = input("Is this OK [y/n]? ") if response.lower() == "y": - for kind in kinds: remove_kind(kind, client) diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 119bab79bd20..412f3923e145 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -374,7 +374,6 @@ def test_client__push_batch_and__pop_batch(): def test_client_get_miss(): - creds = _make_credentials() client = _make_client(credentials=creds) get_multi = client.get_multi = mock.Mock(return_value=[]) @@ -843,7 +842,6 @@ def test_client_get_multi_max_loops(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_client_put(database_id): - creds = _make_credentials() client = _make_client(credentials=creds, database=database_id) put_multi = client.put_multi = mock.Mock() @@ -856,7 +854,6 @@ def test_client_put(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_client_put_w_retry_w_timeout(database_id): - creds = _make_credentials() client = _make_client(credentials=creds, database=database_id) put_multi = client.put_multi = mock.Mock() diff --git a/packages/google-cloud-datastore/tests/unit/test_entity.py b/packages/google-cloud-datastore/tests/unit/test_entity.py index faa862e47f35..96770489c189 100644 --- a/packages/google-cloud-datastore/tests/unit/test_entity.py +++ b/packages/google-cloud-datastore/tests/unit/test_entity.py @@ -209,7 +209,6 @@ def test_id(): def test_id_none(): - entity = _make_entity(key=None) assert entity.id is None diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 7758d7fb8f28..84c0bedf5010 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -373,7 +373,6 @@ def test_query_add_filter_return_query_obj(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_query_add_property_filter_without_keyword_argument(database_id): - query = _make_query(_make_client(database=database_id)) property_filter = PropertyFilter("firstname", "=", "John") with pytest.raises(ValueError) as exc: @@ -387,7 +386,6 @@ def test_query_add_property_filter_without_keyword_argument(database_id): 
@pytest.mark.parametrize("database_id", [None, "somedb"]) def test_query_add_composite_filter_without_keyword_argument(database_id): - query = _make_query(_make_client(database=database_id)) and_filter = And(["firstname", "=", "John"]) with pytest.raises(ValueError) as exc: @@ -410,7 +408,6 @@ def test_query_add_composite_filter_without_keyword_argument(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_query_positional_args_and_property_filter(database_id): - query = _make_query(_make_client(database=database_id)) with pytest.raises(ValueError) as exc: query.add_filter("firstname", "=", "John", filter=("name", "=", "Blabla")) @@ -423,7 +420,6 @@ def test_query_positional_args_and_property_filter(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_query_positional_args_and_composite_filter(database_id): - query = _make_query(_make_client(database=database_id)) and_filter = And(["firstname", "=", "John"]) with pytest.raises(ValueError) as exc: @@ -1199,7 +1195,6 @@ def test_pb_from_complex_filter(): def test_build_pb_for_and(): - and_filter = And( [ ("name", "=", "John"), From 2857422b859a1f5c52e0af3bd473d6d6fcf1d517 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 07:28:50 -0400 Subject: [PATCH 548/611] chore(deps): bump urllib3 from 1.26.17 to 1.26.18 in /.kokoro (#492) Source-Link: https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index dd98abbdeebe..7f291dbd5f9b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 0332d3267e15..16170d0ca7b8 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine From ff5dc344ca92994f209aa8d5ce209a096e2ac474 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 07:44:05 -0400 Subject: [PATCH 549/611] chore: Update gapic-generator-python to v1.11.9 (#490) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.7 PiperOrigin-RevId: 573230664 Source-Link: https://github.com/googleapis/googleapis/commit/93beed334607e70709cc60e6145be65fdc8ec386 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f4a4edaa8057639fcf6adf9179872280d1a8f651 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjRhNGVkYWE4MDU3NjM5ZmNmNmFkZjkxNzk4NzIyODBkMWE4ZjY1MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.11.8 PiperOrigin-RevId: 574178735 Source-Link: https://github.com/googleapis/googleapis/commit/7307199008ee2d57a4337066de29f9cd8c444bc6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ce3af21b7c559a87c2befc076be0e3aeda3a26f0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2UzYWYyMWI3YzU1OWE4N2MyYmVmYzA3NmJlMGUzYWVkYTNhMjZmMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.11.9 PiperOrigin-RevId: 574520922 Source-Link: https://github.com/googleapis/googleapis/commit/5183984d611beb41e90f65f08609b9d926f779bd Source-Link: https://github.com/googleapis/googleapis-gen/commit/a59af19d4ac6509faedf1cc39029141b6a5b8968 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTU5YWYxOWQ0YWM2NTA5ZmFlZGYxY2MzOTAyOTE0MWI2YTViODk2OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../test_datastore_admin.py | 85 ++++++++++++-- .../unit/gapic/datastore_v1/test_datastore.py | 110 +++++++++++------- 2 files changed, 139 insertions(+), 56 deletions(-) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py 
b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 167aee46f769..ab0e0a2a9026 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -2609,6 +2609,73 @@ def test_create_index_rest(request_type): "properties": [{"name": "name_value", "direction": 1}], "state": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = datastore_admin.CreateIndexRequest.meta.fields["index"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["index"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["index"][field])): + del request_init["index"][field][i][subfield] + else: + del request_init["index"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
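The `get_message_fields` helper added in the hunk above exists because the protobuf runtime installed at test time can be older than the one the tests were generated against, so sample request dicts must be pruned down to the fields the runtime message actually defines. A minimal standalone sketch of that pruning idea, using a plain dict and an explicit field set in place of the real proto-plus/protobuf descriptor walk (`prune_request` and `runtime_fields` are illustrative names, not part of the generated code):

```python
# Sketch only: the generated tests derive the field set from message
# descriptors; here it is passed in explicitly for illustration.
def prune_request(request: dict, runtime_fields: set) -> dict:
    """Return a copy of ``request`` without fields unknown to the runtime."""
    return {key: value for key, value in request.items() if key in runtime_fields}


sample_request = {"project_id": "sample1", "index_id": "i1", "not_yet_released": 42}
print(prune_request(sample_request, {"project_id", "index_id"}))
# -> {'project_id': 'sample1', 'index_id': 'i1'}
```
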
@@ -2698,14 +2765,6 @@ def test_create_index_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project_id": "sample1"} - request_init["index"] = { - "project_id": "project_id_value", - "index_id": "index_id_value", - "kind": "kind_value", - "ancestor": 1, - "properties": [{"name": "name_value", "direction": 1}], - "state": 1, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2881,8 +2940,9 @@ def test_get_index_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3009,8 +3069,9 @@ def test_list_indexes_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore_admin.ListIndexesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 8f39cff05c87..958a45f31e6c 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -2456,8 +2456,9 @@ def test_lookup_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2531,8 +2532,9 @@ def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2669,8 +2671,9 @@ def test_lookup_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -2741,8 +2744,9 @@ def test_run_query_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2816,8 +2820,9 @@ def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2948,8 +2953,9 @@ def test_run_aggregation_query_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3025,8 +3031,9 @@ def test_run_aggregation_query_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3159,8 +3166,9 @@ def test_begin_transaction_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3236,8 +3244,9 @@ def test_begin_transaction_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3360,8 +3369,9 @@ def test_begin_transaction_rest_flattened(): # Wrap the value into a proper Response obj response_value = 
Response() response_value.status_code = 200 - pb_return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3425,8 +3435,9 @@ def test_commit_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3500,8 +3511,9 @@ def test_commit_rest_required_fields(request_type=datastore.CommitRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3634,8 +3646,9 @@ def test_commit_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3709,8 +3722,9 @@ def test_rollback_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3787,8 +3801,9 @@ def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3918,8 +3933,9 @@ def test_rollback_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3981,8 +3997,9 @@ def test_allocate_ids_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4055,8 +4072,9 @@ def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsReq response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4190,8 +4208,9 @@ def test_allocate_ids_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4257,8 +4276,9 @@ def test_reserve_ids_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.ReserveIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4331,8 +4351,9 @@ def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsReque response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.ReserveIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4466,8 +4487,9 @@ def test_reserve_ids_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = datastore.ReserveIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value From ca7caa78d08f8f8ce700848bde6474aaa9968037 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 30 Oct 2023 12:44:35 +0100 Subject: [PATCH 550/611] chore(deps): update all dependencies (#493) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 9b6a1aeda140..dfba3f35d90f 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.4.2 +pytest==7.4.3 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index de1887becf2e..f16ee69ae921 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.2 \ No newline at end of file +pytest==7.4.3 \ No newline at end of file From 224ab4b7d523d2e7a9e82081c49d6e37ccb6511a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 3 Nov 2023 07:58:41 -0400 Subject: [PATCH 551/611] chore: update docfx minimum Python version (#495) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update docfx minimum Python version Source-Link: https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 * update replacement in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/.github/workflows/docs.yml | 2 +- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/owlbot.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 7f291dbd5f9b..ec696b558c35 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
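A note on the long run of REST-transport test hunks at the top of this section (run_query through reserve_ids): every hunk makes the same mechanical change, rebinding return_value to its raw protobuf form (with an explanatory comment) instead of keeping a separate pb_return_value. A minimal sketch of the proto-plus/JSON round trip those tests mock, assuming google-cloud-datastore and protobuf are installed; the message type is borrowed from the hunks, and the assertion is illustrative only:

    from google.cloud.datastore_v1.types import datastore
    from google.protobuf import json_format

    return_value = datastore.BeginTransactionResponse(transaction=b"tx-bytes")
    # proto-plus wrapper -> raw protobuf message, as in the updated hunks
    pb_message = datastore.BeginTransactionResponse.pb(return_value)
    # raw protobuf -> JSON string, which becomes the mocked REST response body
    json_body = json_format.MessageToJson(pb_message)
    # the transport under test parses that body back into a message
    parsed = datastore.BeginTransactionResponse.from_json(json_body)
    assert parsed.transaction == b"tx-bytes"
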
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index e97d89e484c9..221806cedf58 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 5f35fbd9c6d7..318f0f47c5e9 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -335,7 +335,7 @@ def doctests(session): session.run("py.test", "tests/doctests.py") -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 44d01b44c7fe..d2007bf87f74 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -183,7 +183,7 @@ def system(session, disable_grpc): assert 1 == s.replace( "noxfile.py", r"""\ -@nox.session\(python="3.9"\) +@nox.session\(python="3.10"\) def docfx\(session\): """, """\ @@ -198,7 +198,7 @@ def doctests(session): session.run("py.test", "tests/doctests.py") -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """, ) From c1fd009b61396e9edbc59df7743563b5be0d7853 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 14 Nov 2023 11:09:54 -0500 Subject: [PATCH 552/611] chore: bump urllib3 from 1.26.12 to 1.26.18 (#496) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 532 +++++++++--------- 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index ec696b558c35..453b540c1e58 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
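The owlbot.py hunk above has to change in lockstep with noxfile.py because synthtool re-applies the repo's customizations on top of freshly generated files, and a replacement whose pattern no longer matches silently does nothing. A condensed sketch of the idiom, assuming synthtool is installed; the real file replaces the whole generated docfx block, not just the decorator:

    import synthtool as s

    # Re-pin the generated docfx session; s.replace() rewrites matching
    # text in the listed files and returns how many replacements it made,
    # so the assertion fails loudly once the template stops matching.
    count = s.replace(
        "noxfile.py",
        r'@nox\.session\(python="3\.9"\)',
        '@nox.session(python="3.10")',
    )
    assert count == 1
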
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 16170d0ca7b8..8957e21104e2 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - 
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - 
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + 
--hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - 
--hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - 
--hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a +google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + 
--hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - 
--hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + 
--hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - 
--hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 +411,30 @@ protobuf==3.20.3 \ # 
gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - 
--hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 +rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + 
--hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r requirements.in From 3703df3344a4bc8a92270c995e61827f9fdcb848 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 1 Dec 2023 15:33:24 -0500 Subject: [PATCH 553/611] feat: Introduce compatibility with native namespace packages (#497) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Introduce compatibility with native namespace packages * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update replacement in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-datastore/google/__init__.py | 22 ----------- .../google/cloud/__init__.py | 22 ----------- packages/google-cloud-datastore/mypy.ini | 2 +- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/owlbot.py | 2 +- packages/google-cloud-datastore/setup.py | 9 +---- .../tests/unit/test_packaging.py | 37 +++++++++++++++++++ 7 files changed, 41 insertions(+), 55 deletions(-) delete mode 100644 packages/google-cloud-datastore/google/__init__.py delete mode 100644 packages/google-cloud-datastore/google/cloud/__init__.py create mode 100644 packages/google-cloud-datastore/tests/unit/test_packaging.py diff --git a/packages/google-cloud-datastore/google/__init__.py b/packages/google-cloud-datastore/google/__init__.py deleted file mode 100644 index 4755e2b063bc..000000000000 --- a/packages/google-cloud-datastore/google/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/packages/google-cloud-datastore/google/cloud/__init__.py b/packages/google-cloud-datastore/google/cloud/__init__.py deleted file mode 100644 index 4755e2b063bc..000000000000 --- a/packages/google-cloud-datastore/google/cloud/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
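The two google/__init__.py shims deleted in this patch exist only to declare pkg_resources-style namespaces; with native PEP 420 namespace packages the __init__.py files are simply absent and discovery moves to find_namespace_packages(). A condensed sketch of the resulting setup.py shape, with placeholder metadata (the real file also reads the version and dependency list):

    import setuptools

    # Without google/__init__.py on disk, only find_namespace_packages()
    # can discover the google.cloud.datastore tree; the namespace_packages=
    # keyword is dropped along with the pkg_resources shims.
    packages = [
        pkg
        for pkg in setuptools.find_namespace_packages()
        if pkg.startswith("google")
    ]

    setuptools.setup(
        name="google-cloud-datastore",
        version="0.0.0",  # placeholder
        packages=packages,
        python_requires=">=3.7",
    )
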
- -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/packages/google-cloud-datastore/mypy.ini b/packages/google-cloud-datastore/mypy.ini index 17f8a8f6afbb..a94e6b3f0f73 100644 --- a/packages/google-cloud-datastore/mypy.ini +++ b/packages/google-cloud-datastore/mypy.ini @@ -1,5 +1,5 @@ [mypy] -python_version = 3.6 +python_version = 3.8 namespace_packages = True ignore_missing_imports = True diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 318f0f47c5e9..f1b48044081b 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -137,7 +137,7 @@ def mypy(session): session.install( "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" ) - session.run("mypy", "google/") + session.run("mypy", "-p", "google") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index d2007bf87f74..777683921ed6 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -283,7 +283,7 @@ def mypy(session): session.install( "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" ) - session.run("mypy", "google/") + session.run("mypy", "-p", "google") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 5447922df83c..e5d616fd1bc1 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -55,16 +55,10 @@ # benchmarks, etc. packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] -# Determine which namespaces are needed. -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - setuptools.setup( name=name, version=version, @@ -91,7 +85,6 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, - namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, python_requires=">=3.7", diff --git a/packages/google-cloud-datastore/tests/unit/test_packaging.py b/packages/google-cloud-datastore/tests/unit/test_packaging.py new file mode 100644 index 000000000000..afc9431ceda2 --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/test_packaging.py @@ -0,0 +1,37 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-datastore``. 
+ google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-datastore``. + google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.cloud.othermod"] + subprocess.check_call(cmd, env=env) From 5c473c7bdc97755b0976ff780b12ce1fb27d2595 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 14:52:24 -0800 Subject: [PATCH 554/611] feat: Add support for Python 3.12 (#498) * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * Add python 3.12 to setup.py, constraints and required checks --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Daniel Sanche --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/sync-repo-settings.yaml | 1 + .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.12/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.12/continuous.cfg | 6 +++ .../samples/python3.12/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.12/periodic.cfg | 6 +++ .../.kokoro/samples/python3.12/presubmit.cfg | 6 +++ .../google-cloud-datastore/CONTRIBUTING.rst | 6 ++- packages/google-cloud-datastore/noxfile.py | 2 +- .../samples/snippets/noxfile.py | 2 +- .../snippets/schedule-export/noxfile.py | 2 +- packages/google-cloud-datastore/setup.py | 1 + .../testing/constraints-3.12.txt | 0 14 files changed, 81 insertions(+), 8 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg create mode 100644 packages/google-cloud-datastore/testing/constraints-3.12.txt diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 453b540c1e58..eb4d9f794dc1 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
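# A note on the pin style used here: the image is referenced by its
# SHA-256 digest rather than the ``latest`` tag, so each template sync
# rewrites the digest and the ``created`` timestamp below.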
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 +# created: 2023-11-23T18:17:28.105124211Z diff --git a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml index dc8cd53ebddb..4b8f66f76573 100644 --- a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml @@ -28,6 +28,7 @@ branchProtectionRules: - 'unit (3.9)' - 'unit (3.10)' - 'unit (3.11)' + - 'unit (3.12)' - 'cover' - 'mypy' # List of explicit permissions to add (additive only) diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index 8057a7691b12..a32027b49bc2 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 000000000000..204de15ee400 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000000..714045a75ed7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index bcd67e5af249..854a6c7c59e5 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.12 -- -k .. note:: @@ -251,12 +251,14 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ Supported versions can be found in our ``noxfile.py`` `config`_. 
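The interpreter lists edited above and below feed straight into nox's session matrix; a minimal sketch of that mechanism (the session body here is illustrative, not this project's actual noxfile):

import nox

UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]

@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
    # nox creates one virtualenv per interpreter in the list, so
    # appending "3.12" is the only change needed to widen the matrix.
    session.install("pytest", ".")
    session.run("pytest", "tests/unit")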
diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index f1b48044081b..2f7a6fca2b81 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -34,7 +34,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index 7c8a63994cbd..483b55901791 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py index 7c8a63994cbd..483b55901791 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index e5d616fd1bc1..07827e71a172 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -79,6 +79,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/packages/google-cloud-datastore/testing/constraints-3.12.txt b/packages/google-cloud-datastore/testing/constraints-3.12.txt new file mode 100644 index 000000000000..e69de29bb2d1 From d044a4a3943f294d1473d0aac628c8ecce20e0f8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 15:04:09 -0800 Subject: [PATCH 555/611] feat: added Generator API (#502) --- .../services/datastore_admin/async_client.py | 38 +++++++-------- .../services/datastore/async_client.py | 46 +++++++++---------- .../test_datastore_admin.py | 8 ++-- .../unit/gapic/datastore_v1/test_datastore.py | 8 ++-- 4 files changed, 50 insertions(+), 50 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index f0178210b18b..40cde7575721 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -376,7 +376,7 @@ async def sample_export_entities(): This corresponds to the ``output_url_prefix`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -549,7 +549,7 @@ async def sample_import_entities(): This corresponds to the ``entity_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -687,7 +687,7 @@ async def sample_create_index(): request (Optional[Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -796,7 +796,7 @@ async def sample_delete_index(): request (Optional[Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -891,7 +891,7 @@ async def sample_get_index(): request (Optional[Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -908,7 +908,7 @@ async def sample_get_index(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_index, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -987,7 +987,7 @@ async def sample_list_indexes(): request (Optional[Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]]): The request object. The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1009,7 +1009,7 @@ async def sample_list_indexes(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_indexes, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1065,7 +1065,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1082,7 +1082,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
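        # The change just below (repeated throughout this commit) swaps
        # the synchronous gapic_v1.method.wrap_method for
        # gapic_v1.method_async.wrap_method and pairs it with
        # retry_async.AsyncRetry, so retries await the coroutine
        # transport call instead of invoking it like a plain function.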
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1119,7 +1119,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1136,7 +1136,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1178,7 +1178,7 @@ async def delete_operation( request (:class:`~.operations_pb2.DeleteOperationRequest`): The request object. Request message for `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1194,7 +1194,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1232,7 +1232,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1248,7 +1248,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index b5b700efa84b..c49b4b079319 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore @@ -278,7 +278,7 @@ async def sample_lookup(): This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -315,7 +315,7 @@ async def sample_lookup(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.lookup, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -388,7 +388,7 @@ async def sample_run_query(): request (Optional[Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]]): The request object. The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -407,7 +407,7 @@ async def sample_run_query(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.run_query, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -480,7 +480,7 @@ async def sample_run_aggregation_query(): request (Optional[Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]]): The request object. The request for [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -499,7 +499,7 @@ async def sample_run_aggregation_query(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.run_aggregation_query, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -580,7 +580,7 @@ async def sample_begin_transaction(): This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -724,7 +724,7 @@ async def sample_commit(): This corresponds to the ``mutations`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -844,7 +844,7 @@ async def sample_rollback(): This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -962,7 +962,7 @@ async def sample_allocate_ids(): This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1078,7 +1078,7 @@ async def sample_reserve_ids(): This corresponds to the ``keys`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1113,7 +1113,7 @@ async def sample_reserve_ids(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.reserve_ids, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1160,7 +1160,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1177,7 +1177,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1214,7 +1214,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1231,7 +1231,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1273,7 +1273,7 @@ async def delete_operation( request (:class:`~.operations_pb2.DeleteOperationRequest`): The request object. Request message for `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1289,7 +1289,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1327,7 +1327,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1343,7 +1343,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index ab0e0a2a9026..eb8b8a4f44b5 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -4242,7 +4242,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): +async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4381,7 +4381,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4520,7 +4520,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4665,7 +4665,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 958a45f31e6c..52d8de52a8e5 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -5487,7 +5487,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): +async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5626,7 +5626,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5765,7 +5765,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5910,7 +5910,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def 
test_list_operations_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, From 47c718b08d3eff676b3ccbe588a8e0e4ea50a48f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 16:47:24 -0800 Subject: [PATCH 556/611] chore: bump cryptography from 41.0.5 to 41.0.6 in /synthtool/gcp/templates/python_library/.kokoro (#501) Source-Link: https://github.com/googleapis/synthtool/commit/9367caadcbb30b5b2719f30eb00c44cc913550ed Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 48 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index eb4d9f794dc1..773c1dfd2146 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 -# created: 2023-11-23T18:17:28.105124211Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 8957e21104e2..e5c1ffca94b7 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - 
--hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From c2ef8f19339751e149d33943a220a16a712cee4e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 7 Dec 2023 20:33:05 +0100 Subject: [PATCH 557/611] chore(deps): update all dependencies (#505) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/workflows/mypy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml index 1445918662a8..3915cddd3d1c 100644 --- a/packages/google-cloud-datastore/.github/workflows/mypy.yml +++ b/packages/google-cloud-datastore/.github/workflows/mypy.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install nox From ef910cdbdde1a0aa5bbdb11972f029ef26a1a060 Mon Sep 17 
00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 10 Dec 2023 09:01:28 -0500 Subject: [PATCH 558/611] build: update actions/checkout and actions/setup-python (#507) Source-Link: https://github.com/googleapis/synthtool/commit/3551acd1261fd8f616cbfd054cda9bd6d6ac75f4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-datastore/.github/workflows/docs.yml | 8 ++++---- .../google-cloud-datastore/.github/workflows/lint.yml | 4 ++-- .../google-cloud-datastore/.github/workflows/unittest.yml | 8 ++++---- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 773c1dfd2146..40bf99731959 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c -# created: 2023-11-29T14:54:29.548172703Z + digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 +# created: 2023-12-09T15:16:25.430769578Z diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index 221806cedf58..698fbc5c94da 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.9" - name: Install nox @@ -24,9 +24,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml index 16d5a9e90f6d..4866193af2a9 100644 --- a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install nox diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index a32027b49bc2..d6ca65627c2d 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -11,9 +11,9 @@ jobs: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -37,9 +37,9 @@ jobs: - unit steps: - name: Checkout - 
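      # (checkout@v4 and setup-python@v5, swapped in throughout this
      # commit, run on the Node 20 runtime; the v3/v4 releases they
      # replace ran on Node 16.)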
uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install coverage From 91ff2a8c641da8b761fd505dac8969a2433bdce6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 12:34:21 -0800 Subject: [PATCH 559/611] chore(main): release 2.19.0 (#481) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 18 ++++++++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../cloud/datastore_admin/gapic_version.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 24 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index a627e662e002..b7f666a684a7 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.18.0" + ".": "2.19.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 499af3b62e77..52d6dfc7cd5d 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.19.0](https://github.com/googleapis/python-datastore/compare/v2.18.0...v2.19.0) (2023-12-10) + + +### Features + +* Add support for Python 3.12 ([#498](https://github.com/googleapis/python-datastore/issues/498)) ([d1d60fa](https://github.com/googleapis/python-datastore/commit/d1d60fa602eca2062a505a0750b0ce6dccc771cd)) +* Introduce compatibility with native namespace packages ([#497](https://github.com/googleapis/python-datastore/issues/497)) ([87b3392](https://github.com/googleapis/python-datastore/commit/87b339228896da197b0ee77e2b00994431ae8d2e)) + + +### Bug Fixes + +* Use `retry_async` instead of `retry` in async client ([4e15ce6](https://github.com/googleapis/python-datastore/commit/4e15ce640580f14fb1ee5d8ad49ea48e860ff1da)) + + +### Documentation + +* Minor formatting ([#476](https://github.com/googleapis/python-datastore/issues/476)) ([b13b15c](https://github.com/googleapis/python-datastore/commit/b13b15cd95c02c923f9991b088bb71eda777cf46)) + ## [2.18.0](https://github.com/googleapis/python-datastore/compare/v2.17.0...v2.18.0) (2023-09-05) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 38172a8ea25b..28762874367c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
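# The {x-release-please-version} marker is what allows release-please
# to rewrite this pin automatically on each release PR, as in the
# version bumps just below.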
-__version__ = "2.18.0" # {x-release-please-version} +__version__ = "2.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index a613e5ea2f7e..2605c08a3adf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.18.0" +__version__ = "2.19.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index f09943f6bdf7..0f1a446f3802 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.18.0" # {x-release-please-version} +__version__ = "2.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 552e8442b58e..8dc121fd5787 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.18.0" # {x-release-please-version} +__version__ = "2.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 552e8442b58e..8dc121fd5787 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.18.0" # {x-release-please-version} +__version__ = "2.19.0" # {x-release-please-version} From 7fd42d658a6022221ad6b67fa2aec01a812da65d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 12 Dec 2023 23:18:29 +0100 Subject: [PATCH 560/611] chore(deps): update dependency google-cloud-datastore to v2.19.0 (#508) --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index d4e90e37456f..5bccacc5f61e 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.18.0 \ No newline at end of file +google-cloud-datastore==2.19.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index a84b83a1fe64..b748abdc9c1e 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.18.0 +google-cloud-datastore==2.19.0 From f0f26d85b2ffc9f32e2df00ff4b6a7cf5941d8d0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:27:11 -0500 Subject: [PATCH 561/611] build: update actions/upload-artifact and actions/download-artifact (#510) Source-Link: https://github.com/googleapis/synthtool/commit/280ddaed417057dfe5b1395731de07b7d09f5058 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 Co-authored-by: Owl Bot --- .../google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- .../.github/workflows/unittest.yml | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 40bf99731959..9bee24097165 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 -# created: 2023-12-09T15:16:25.430769578Z + digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 +# created: 2023-12-14T22:17:57.611773021Z diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index d6ca65627c2d..f4a337c496a0 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -26,9 +26,9 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-artifacts + name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} cover: @@ -47,11 +47,11 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage-artifacts path: .coverage-results/ - name: Report coverage results run: | - coverage combine .coverage-results/.coverage* + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* coverage report --show-missing --fail-under=100 From 6e9f1b2542c604f5a24a0a91a86b07cb1965efd7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:38:55 -0800 Subject: [PATCH 562/611] feat: add new types QueryMode, QueryPlan, ResultSetStats (#512) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add new types QueryMode, QueryPlan, ResultSetStats feat: add QueryMode field to RunQueryRequest feat: add ResultSetStats field to RunQueryResponse feat: add QueryMode field to RunAggregationQueryRequest feat: add ResultSetStats field to RunAggregationQueryResponse PiperOrigin-RevId: 595774772 Source-Link: https://github.com/googleapis/googleapis/commit/03e7ed44ac499ee71baa2770e78045a3a85f30f5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/dc63e0dea8423c230d5fb0937acb3c98719c9395 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGM2M2UwZGVhODQyM2MyMzBkNWZiMDkzN2FjYjNjOTg3MTljOTM5NSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/datastore_v1/__init__.py | 6 + .../services/datastore/async_client.py | 1 + .../datastore_v1/services/datastore/client.py | 1 + .../cloud/datastore_v1/types/__init__.py | 8 ++ .../cloud/datastore_v1/types/datastore.py | 43 ++++++++ .../cloud/datastore_v1/types/query_profile.py | 104 ++++++++++++++++++ .../scripts/fixup_datastore_v1_keywords.py | 4 +- .../unit/gapic/datastore_v1/test_datastore.py | 1 + 8 files changed, 166 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index b494905bdb04..c72ebcf6ce44 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -61,6 +61,9 @@ 
from .types.query import PropertyReference from .types.query import Query from .types.query import QueryResultBatch +from .types.query_profile import QueryPlan +from .types.query_profile import ResultSetStats +from .types.query_profile import QueryMode __all__ = ( "DatastoreAsyncClient", @@ -93,10 +96,13 @@ "PropertyOrder", "PropertyReference", "Query", + "QueryMode", + "QueryPlan", "QueryResultBatch", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", + "ResultSetStats", "RollbackRequest", "RollbackResponse", "RunAggregationQueryRequest", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index c49b4b079319..b49689202476 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -46,6 +46,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 58c8ad221d85..b070d01c09f8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -50,6 +50,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index b6ff2a444e92..84db9bf8390a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -61,6 +61,11 @@ Query, QueryResultBatch, ) +from .query_profile import ( + QueryPlan, + ResultSetStats, + QueryMode, +) __all__ = ( "AggregationResult", @@ -103,4 +108,7 @@ "PropertyReference", "Query", "QueryResultBatch", + "QueryPlan", + "ResultSetStats", + "QueryMode", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 6c768904ef28..07ccfba8d09c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -22,6 +22,7 @@ from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.cloud.datastore_v1.types import query_profile from google.protobuf import 
timestamp_pb2 # type: ignore @@ -185,6 +186,11 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. + mode (google.cloud.datastore_v1.types.QueryMode): + Optional. The mode in which the query request is processed. + This field is optional, and when not provided, it defaults + to ``NORMAL`` mode where no additional statistics will be + returned with the query results. """ project_id: str = proto.Field( @@ -217,6 +223,11 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + mode: query_profile.QueryMode = proto.Field( + proto.ENUM, + number=11, + enum=query_profile.QueryMode, + ) class RunQueryResponse(proto.Message): @@ -237,6 +248,12 @@ class RunQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. + stats (google.cloud.datastore_v1.types.ResultSetStats): + Query plan and execution statistics. Note that the returned + stats are subject to change as Firestore evolves. + + This is only present when the request specifies a mode other + than ``NORMAL``. """ batch: gd_query.QueryResultBatch = proto.Field( @@ -253,6 +270,11 @@ class RunQueryResponse(proto.Message): proto.BYTES, number=5, ) + stats: query_profile.ResultSetStats = proto.Field( + proto.MESSAGE, + number=6, + message=query_profile.ResultSetStats, + ) class RunAggregationQueryRequest(proto.Message): @@ -292,6 +314,11 @@ class RunAggregationQueryRequest(proto.Message): aggregation query. This field is a member of `oneof`_ ``query_type``. + mode (google.cloud.datastore_v1.types.QueryMode): + Optional. The mode in which the query request is processed. + This field is optional, and when not provided, it defaults + to ``NORMAL`` mode where no additional statistics will be + returned with the query results. """ project_id: str = proto.Field( @@ -324,6 +351,11 @@ class RunAggregationQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + mode: query_profile.QueryMode = proto.Field( + proto.ENUM, + number=10, + enum=query_profile.QueryMode, + ) class RunAggregationQueryResponse(proto.Message): @@ -345,6 +377,12 @@ class RunAggregationQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. + stats (google.cloud.datastore_v1.types.ResultSetStats): + Query plan and execution statistics. Note that the returned + stats are subject to change as Firestore evolves. + + This is only present when the request specifies a mode other + than ``NORMAL``. 
""" batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -361,6 +399,11 @@ class RunAggregationQueryResponse(proto.Message): proto.BYTES, number=5, ) + stats: query_profile.ResultSetStats = proto.Field( + proto.MESSAGE, + number=6, + message=query_profile.ResultSetStats, + ) class BeginTransactionRequest(proto.Message): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py new file mode 100644 index 000000000000..3258a0f7351c --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "QueryMode", + "QueryPlan", + "ResultSetStats", + }, +) + + +class QueryMode(proto.Enum): + r"""The mode in which the query request must be processed. + + Values: + NORMAL (0): + The default mode. Only the query results are + returned. + PLAN (1): + This mode returns only the query plan, + without any results or execution statistics + information. + PROFILE (2): + This mode returns both the query plan and the + execution statistics along with the results. + """ + NORMAL = 0 + PLAN = 1 + PROFILE = 2 + + +class QueryPlan(proto.Message): + r"""Plan for the query. + + Attributes: + plan_info (google.protobuf.struct_pb2.Struct): + Planning phase information for the query. It will include: + + { "indexes_used": [ {"query_scope": "Collection", + "properties": "(foo ASC, **name** ASC)"}, {"query_scope": + "Collection", "properties": "(bar ASC, **name** ASC)"} ] } + """ + + plan_info: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class ResultSetStats(proto.Message): + r"""Planning and execution statistics for the query. + + Attributes: + query_plan (google.cloud.datastore_v1.types.QueryPlan): + Plan for the query. + query_stats (google.protobuf.struct_pb2.Struct): + Aggregated statistics from the execution of the query. + + This will only be present when the request specifies + ``PROFILE`` mode. 
For example, a query will return the + statistics including: + + { "results_returned": "20", "documents_scanned": "20", + "indexes_entries_scanned": "10050", "total_execution_time": + "100.7 msecs" } + """ + + query_plan: "QueryPlan" = proto.Field( + proto.MESSAGE, + number=1, + message="QueryPlan", + ) + query_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 4a8be1c96b8b..eb323e40581a 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -45,8 +45,8 @@ class datastoreCallTransformer(cst.CSTTransformer): 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'mode', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'mode', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 52d8de52a8e5..8603a840f6eb 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -51,6 +51,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore From ada6f6a762aa4995395497cd5ad999683c1fb07a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 22 Jan 2024 20:40:58 +0100 Subject: [PATCH 563/611] chore(deps): update dependency pytest to v7.4.4 (#511) --- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/schedule-export/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index dfba3f35d90f..80d3b1a9ebe0 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; python_version >= "3.7" -pytest==7.4.3 +pytest==7.4.4 flaky==3.7.0 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index f16ee69ae921..fa427e190024 100644 --- 
a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.3 \ No newline at end of file +pytest==7.4.4 \ No newline at end of file From fa78520871bee966940a5abc4c217bd8c96974c9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:17:19 -0500 Subject: [PATCH 564/611] build(python): fix `docs` and `docfx` builds (#515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build(python): fix `docs` and `docfx` builds Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa * exclude types-protobuf==4.24.0.20240106 release * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 6 ++-- .../.kokoro/requirements.txt | 6 ++-- packages/google-cloud-datastore/noxfile.py | 28 +++++++++++++++++-- packages/google-cloud-datastore/owlbot.py | 4 ++- 4 files changed, 35 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 9bee24097165..d8a1bbca7179 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 -# created: 2023-12-14T22:17:57.611773021Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index e5c1ffca94b7..bb3d6ca38b14 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 2f7a6fca2b81..e4e112d5531d 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -134,8 +134,14 @@ def format(session): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 session.install( - "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + "mypy", + "types-setuptools", + "types-mock", + "types-protobuf!=4.24.0.20240106", + "types-requests", ) session.run("mypy", "-p", "google") @@ -304,7 +310,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -341,6 +356,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 777683921ed6..82565aa3c809 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -280,8 +280,10 @@ def lint_setup_py\(session\): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 session.install( - "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + "mypy", "types-setuptools", "types-mock", "types-protobuf!=4.24.0.20240106", "types-requests" ) session.run("mypy", "-p", "google") From ad9af50c814631d5f99992ad783ec43be5e482ae Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 24 Jan 2024 04:00:56 -0800 Subject: [PATCH 565/611] chore: Update CODEOWNERS (#516) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update CODEOWNERS * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/CODEOWNERS | 8 ++++---- packages/google-cloud-datastore/.repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS index eb5a61d0ffd2..6475082470f2 100644 --- a/packages/google-cloud-datastore/.github/CODEOWNERS +++ b/packages/google-cloud-datastore/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk +# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners -# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk +# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index e6645432a60d..d7b18d4a99cf 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -11,7 +11,7 @@ "distribution_name": "google-cloud-datastore", "api_id": "datastore.googleapis.com", "default_version": "v1", - "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk", + "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners", "api_shortname": "datastore", "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries." 
} From 777ca09c124f85f576b2a0ed26ed859775e86c32 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 25 Jan 2024 16:01:00 -0500 Subject: [PATCH 566/611] chore(revert): revert add new types QueryMode, QueryPlan, ResultSetStats --- .../google/cloud/datastore_v1/__init__.py | 6 - .../services/datastore/async_client.py | 1 - .../datastore_v1/services/datastore/client.py | 1 - .../cloud/datastore_v1/types/__init__.py | 8 -- .../cloud/datastore_v1/types/datastore.py | 43 -------- .../cloud/datastore_v1/types/query_profile.py | 104 ------------------ .../scripts/fixup_datastore_v1_keywords.py | 4 +- .../unit/gapic/datastore_v1/test_datastore.py | 1 - 8 files changed, 2 insertions(+), 166 deletions(-) delete mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index c72ebcf6ce44..b494905bdb04 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -61,9 +61,6 @@ from .types.query import PropertyReference from .types.query import Query from .types.query import QueryResultBatch -from .types.query_profile import QueryPlan -from .types.query_profile import ResultSetStats -from .types.query_profile import QueryMode __all__ = ( "DatastoreAsyncClient", @@ -96,13 +93,10 @@ "PropertyOrder", "PropertyReference", "Query", - "QueryMode", - "QueryPlan", "QueryResultBatch", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", - "ResultSetStats", "RollbackRequest", "RollbackResponse", "RunAggregationQueryRequest", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index b49689202476..c49b4b079319 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -46,7 +46,6 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index b070d01c09f8..58c8ad221d85 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -50,7 +50,6 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py 
b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index 84db9bf8390a..b6ff2a444e92 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -61,11 +61,6 @@ Query, QueryResultBatch, ) -from .query_profile import ( - QueryPlan, - ResultSetStats, - QueryMode, -) __all__ = ( "AggregationResult", @@ -108,7 +103,4 @@ "PropertyReference", "Query", "QueryResultBatch", - "QueryPlan", - "ResultSetStats", - "QueryMode", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 07ccfba8d09c..6c768904ef28 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -22,7 +22,6 @@ from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query -from google.cloud.datastore_v1.types import query_profile from google.protobuf import timestamp_pb2 # type: ignore @@ -186,11 +185,6 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. - mode (google.cloud.datastore_v1.types.QueryMode): - Optional. The mode in which the query request is processed. - This field is optional, and when not provided, it defaults - to ``NORMAL`` mode where no additional statistics will be - returned with the query results. """ project_id: str = proto.Field( @@ -223,11 +217,6 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) - mode: query_profile.QueryMode = proto.Field( - proto.ENUM, - number=11, - enum=query_profile.QueryMode, - ) class RunQueryResponse(proto.Message): @@ -248,12 +237,6 @@ class RunQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. - stats (google.cloud.datastore_v1.types.ResultSetStats): - Query plan and execution statistics. Note that the returned - stats are subject to change as Firestore evolves. - - This is only present when the request specifies a mode other - than ``NORMAL``. """ batch: gd_query.QueryResultBatch = proto.Field( @@ -270,11 +253,6 @@ class RunQueryResponse(proto.Message): proto.BYTES, number=5, ) - stats: query_profile.ResultSetStats = proto.Field( - proto.MESSAGE, - number=6, - message=query_profile.ResultSetStats, - ) class RunAggregationQueryRequest(proto.Message): @@ -314,11 +292,6 @@ class RunAggregationQueryRequest(proto.Message): aggregation query. This field is a member of `oneof`_ ``query_type``. - mode (google.cloud.datastore_v1.types.QueryMode): - Optional. The mode in which the query request is processed. - This field is optional, and when not provided, it defaults - to ``NORMAL`` mode where no additional statistics will be - returned with the query results. 
""" project_id: str = proto.Field( @@ -351,11 +324,6 @@ class RunAggregationQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) - mode: query_profile.QueryMode = proto.Field( - proto.ENUM, - number=10, - enum=query_profile.QueryMode, - ) class RunAggregationQueryResponse(proto.Message): @@ -377,12 +345,6 @@ class RunAggregationQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. - stats (google.cloud.datastore_v1.types.ResultSetStats): - Query plan and execution statistics. Note that the returned - stats are subject to change as Firestore evolves. - - This is only present when the request specifies a mode other - than ``NORMAL``. """ batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -399,11 +361,6 @@ class RunAggregationQueryResponse(proto.Message): proto.BYTES, number=5, ) - stats: query_profile.ResultSetStats = proto.Field( - proto.MESSAGE, - number=6, - message=query_profile.ResultSetStats, - ) class BeginTransactionRequest(proto.Message): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py deleted file mode 100644 index 3258a0f7351c..000000000000 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.datastore.v1", - manifest={ - "QueryMode", - "QueryPlan", - "ResultSetStats", - }, -) - - -class QueryMode(proto.Enum): - r"""The mode in which the query request must be processed. - - Values: - NORMAL (0): - The default mode. Only the query results are - returned. - PLAN (1): - This mode returns only the query plan, - without any results or execution statistics - information. - PROFILE (2): - This mode returns both the query plan and the - execution statistics along with the results. - """ - NORMAL = 0 - PLAN = 1 - PROFILE = 2 - - -class QueryPlan(proto.Message): - r"""Plan for the query. - - Attributes: - plan_info (google.protobuf.struct_pb2.Struct): - Planning phase information for the query. It will include: - - { "indexes_used": [ {"query_scope": "Collection", - "properties": "(foo ASC, **name** ASC)"}, {"query_scope": - "Collection", "properties": "(bar ASC, **name** ASC)"} ] } - """ - - plan_info: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=1, - message=struct_pb2.Struct, - ) - - -class ResultSetStats(proto.Message): - r"""Planning and execution statistics for the query. 
- - Attributes: - query_plan (google.cloud.datastore_v1.types.QueryPlan): - Plan for the query. - query_stats (google.protobuf.struct_pb2.Struct): - Aggregated statistics from the execution of the query. - - This will only be present when the request specifies - ``PROFILE`` mode. For example, a query will return the - statistics including: - - { "results_returned": "20", "documents_scanned": "20", - "indexes_entries_scanned": "10050", "total_execution_time": - "100.7 msecs" } - """ - - query_plan: "QueryPlan" = proto.Field( - proto.MESSAGE, - number=1, - message="QueryPlan", - ) - query_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index eb323e40581a..4a8be1c96b8b 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -45,8 +45,8 @@ class datastoreCallTransformer(cst.CSTTransformer): 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'mode', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'mode', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 8603a840f6eb..52d8de52a8e5 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -51,7 +51,6 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore From 7f023a8ba3e8d78d0d1f476fe69ca4dbf1942ea2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 24 Feb 2024 08:17:00 -0500 Subject: [PATCH 567/611] build(deps): bump cryptography from 41.0.6 to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#523) Source-Link: https://github.com/googleapis/synthtool/commit/e13b22b1f660c80e4c3e735a9177d2f16c4b8bdc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 57 +++++++++++-------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 
d8a1bbca7179..2aefd0e91175 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index bb3d6ca38b14..8c11c9f3e9b6 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + 
--hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # gcp-releasetool # secretstorage From d82247e8ee0e07dd445c77c57c6450de5aab2641 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:39:30 -0500 Subject: [PATCH 568/611] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#527) Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 66 +++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 2aefd0e91175..e4e943e0259a 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 8c11c9f3e9b6..bda8e38c4f31 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ 
+ --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From f795b21098c08e2903352bdf349d6d6ee5489af9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 6 Mar 2024 10:41:00 -0500 Subject: [PATCH 569/611] chore: fix typos (#525) * chore: fix minor typos * protobf -> protobuf --- .../google/cloud/datastore/_http.py | 2 +- .../tests/system/test_transaction.py | 2 +- .../tests/unit/test_helpers.py | 18 +++++++++--------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/_http.py b/packages/google-cloud-datastore/google/cloud/datastore/_http.py index a4441c095165..1eccfd351555 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/_http.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/_http.py @@ -32,7 +32,7 @@ def _make_retry_timeout_kwargs(retry, timeout): - """Helper for methods taking optional retry / timout args.""" + """Helper for methods taking optional retry / timeout args.""" kwargs = {} if retry is not None: diff --git 
a/packages/google-cloud-datastore/tests/system/test_transaction.py b/packages/google-cloud-datastore/tests/system/test_transaction.py index a93538fbeb43..6dc9dacd6691 100644 --- a/packages/google-cloud-datastore/tests/system/test_transaction.py +++ b/packages/google-cloud-datastore/tests/system/test_transaction.py @@ -49,7 +49,7 @@ def test_transaction_via_explicit_begin_get_commit( # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 # Note that this example lacks the threading which provokes the race # condition in that issue: we are basically just exercising the - # "explict" path for using transactions. + # "explicit" path for using transactions. before_1 = 100 before_2 = 0 transfer_amount = 40 diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 467a2df18d78..89bf61657ed2 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -208,7 +208,7 @@ def _compare_entity_proto(entity_pb1, entity_pb2): assert val1 == val2 -def test_enity_to_protobf_w_empty(): +def test_entity_to_protobuf_w_empty(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -218,7 +218,7 @@ def test_enity_to_protobf_w_empty(): _compare_entity_proto(entity_pb, entity_pb2.Entity()) -def test_enity_to_protobf_w_key_only(): +def test_entity_to_protobuf_w_key_only(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -239,7 +239,7 @@ def test_enity_to_protobf_w_key_only(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_simple_fields(): +def test_entity_to_protobuf_w_simple_fields(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -261,7 +261,7 @@ def test_enity_to_protobf_w_simple_fields(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_with_empty_list(): +def test_entity_to_protobuf_w_with_empty_list(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -277,7 +277,7 @@ def test_enity_to_protobf_w_with_empty_list(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_inverts_to_protobuf(): +def test_entity_to_protobuf_w_inverts_to_protobuf(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.helpers import _new_value_pb from google.cloud.datastore.helpers import entity_from_protobuf @@ -332,7 +332,7 @@ def test_enity_to_protobf_w_inverts_to_protobuf(): _compare_entity_proto(original_pb, new_pb) -def test_enity_to_protobf_w_meaning_with_change(): +def test_entity_to_protobuf_w_meaning_with_change(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -352,7 +352,7 @@ def test_enity_to_protobf_w_meaning_with_change(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_variable_meanings(): +def test_entity_to_protobuf_w_variable_meanings(): from google.cloud.datastore_v1.types import 
entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -380,7 +380,7 @@ def test_enity_to_protobf_w_variable_meanings(): _compare_entity_proto(entity_pb, expected_pb) -def test_enity_to_protobf_w_dict_to_entity(): +def test_entity_to_protobuf_w_dict_to_entity(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf @@ -401,7 +401,7 @@ def test_enity_to_protobf_w_dict_to_entity(): assert entity_pb == expected_pb -def test_enity_to_protobf_w_dict_to_entity_recursive(): +def test_entity_to_protobuf_w_dict_to_entity_recursive(): from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import entity_to_protobuf From 9c188991444851001d1ff1f6a9e298cf3309d7d9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 21:06:10 +0000 Subject: [PATCH 570/611] feat: add new types ExplainOptions, ExplainMetrics, PlanSummary, ExecutionStats (#521) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Allow users to explicitly configure universe domain chore: Update gapic-generator-python to v1.14.0 PiperOrigin-RevId: 603108274 Source-Link: https://github.com/googleapis/googleapis/commit/3d83e3652f689ab51c3f95f876458c6faef619bf Source-Link: https://github.com/googleapis/googleapis-gen/commit/baf5e9bbb14a768b2b4c9eae9feb78f18f1757fa Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmFmNWU5YmJiMTRhNzY4YjJiNGM5ZWFlOWZlYjc4ZjE4ZjE3NTdmYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Resolve AttributeError 'Credentials' object has no attribute 'universe_domain' fix: Add google-auth as a direct dependency fix: Add staticmethod decorator to methods added in v1.14.0 chore: Update gapic-generator-python to v1.14.1 PiperOrigin-RevId: 603728206 Source-Link: https://github.com/googleapis/googleapis/commit/9063da8b4d45339db4e2d7d92a27c6708620e694 Source-Link: https://github.com/googleapis/googleapis-gen/commit/891c67d0a855b08085eb301dabb14064ef4b2c6d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODkxYzY3ZDBhODU1YjA4MDg1ZWIzMDFkYWJiMTQwNjRlZjRiMmM2ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(diregapic): s/bazel/bazelisk/ in DIREGAPIC build GitHub action PiperOrigin-RevId: 604714585 Source-Link: https://github.com/googleapis/googleapis/commit/e4dce1324f4cb6dedb6822cb157e13cb8e0b3073 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4036f78305c5c2aab80ff91960b3a3d983ff4b03 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDAzNmY3ODMwNWM1YzJhYWI4MGZmOTE5NjBiM2EzZDk4M2ZmNGIwMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Require `google-api-core>=1.34.1` fix: Resolve issue with missing import for certain enums in `**/types/…` PiperOrigin-RevId: 607041732 Source-Link: https://github.com/googleapis/googleapis/commit/b4532678459355676c95c00e39866776b7f40b2e Source-Link: https://github.com/googleapis/googleapis-gen/commit/cd796416f0f54cb22b2c44fb2d486960e693a346 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2Q3OTY0MTZmMGY1NGNiMjJiMmM0NGZiMmQ0ODY5NjBlNjkzYTM0NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Exclude google-auth 2.24.0 and 2.25.0 chore: Update gapic-generator-python to v1.14.4 PiperOrigin-RevId: 611561820 Source-Link: https://github.com/googleapis/googleapis/commit/87ef1fe57feede1f23b523f3c7fc4c3f2b92d6d2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/197316137594aafad94dea31226528fbcc39310c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTk3MzE2MTM3NTk0YWFmYWQ5NGRlYTMxMjI2NTI4ZmJjYzM5MzEwYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add include_recaptcha_script for as a new action in firewall policies PiperOrigin-RevId: 612851792 Source-Link: https://github.com/googleapis/googleapis/commit/49ea2c0fc42dd48996b833f05a258ad7e8590d3d Source-Link: https://github.com/googleapis/googleapis-gen/commit/460fdcbbbe00f35b1c591b1f3ef0c77ebd3ce277 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDYwZmRjYmJiZTAwZjM1YjFjNTkxYjFmM2VmMGM3N2ViZDNjZTI3NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix coverage * fix(deps): require google-auth >= 2.14.1 * feat: add new types ExplainOptions, ExplainMetrics, PlanSummary, ExecutionStats feat: add ExplainOptions field to RunQueryRequest feat: add ExplainMetrics field to RunQueryResponse feat: add ExplainOptions field to RunAggregationQueryRequest feat: add ExplainMetrics field to RunAggregationQueryResponse PiperOrigin-RevId: 615158168 Source-Link: https://github.com/googleapis/googleapis/commit/4d535ac0538bb2d4b406250d7ec10b25a17a54cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/02e272ded538b0f97832bfad47decbc3dc65a89a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDJlMjcyZGVkNTM4YjBmOTc4MzJiZmFkNDdkZWNiYzNkYzY1YTg5YSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/datastore_admin/__init__.py | 2 +- .../cloud/datastore_admin_v1/__init__.py | 2 +- .../datastore_admin_v1/services/__init__.py | 2 +- .../services/datastore_admin/__init__.py | 2 +- .../services/datastore_admin/async_client.py | 98 +++- .../services/datastore_admin/client.py | 322 ++++++++++-- .../services/datastore_admin/pagers.py | 2 +- .../datastore_admin/transports/__init__.py | 2 +- .../datastore_admin/transports/base.py | 8 +- .../datastore_admin/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../datastore_admin/transports/rest.py | 26 +- .../datastore_admin_v1/types/__init__.py | 2 +- .../types/datastore_admin.py | 2 +- .../cloud/datastore_admin_v1/types/index.py | 2 +- .../datastore_admin_v1/types/migration.py | 2 +- .../google/cloud/datastore_v1/__init__.py | 10 +- .../cloud/datastore_v1/services/__init__.py | 2 +- .../services/datastore/__init__.py | 2 +- .../services/datastore/async_client.py | 105 +++- .../datastore_v1/services/datastore/client.py | 326 ++++++++++-- .../services/datastore/transports/__init__.py | 2 +- .../services/datastore/transports/base.py | 8 +- .../services/datastore/transports/grpc.py | 4 +- .../datastore/transports/grpc_asyncio.py | 4 +- .../services/datastore/transports/rest.py | 
48 +- .../cloud/datastore_v1/types/__init__.py | 12 +- .../datastore_v1/types/aggregation_result.py | 2 +- .../cloud/datastore_v1/types/datastore.py | 43 +- .../google/cloud/datastore_v1/types/entity.py | 2 +- .../google/cloud/datastore_v1/types/query.py | 2 +- .../cloud/datastore_v1/types/query_profile.py | 144 ++++++ .../fixup_datastore_admin_v1_keywords.py | 2 +- .../scripts/fixup_datastore_v1_keywords.py | 6 +- packages/google-cloud-datastore/setup.py | 3 + .../testing/constraints-3.7.txt | 1 + .../testing/constraints-3.8.txt | 1 + .../google-cloud-datastore/tests/__init__.py | 2 +- .../tests/unit/__init__.py | 2 +- .../tests/unit/gapic/__init__.py | 2 +- .../unit/gapic/datastore_admin_v1/__init__.py | 2 +- .../test_datastore_admin.py | 481 +++++++++++++++-- .../tests/unit/gapic/datastore_v1/__init__.py | 2 +- .../unit/gapic/datastore_v1/test_datastore.py | 488 +++++++++++++++--- 44 files changed, 1899 insertions(+), 291 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py index 17ff436e52f9..09b75aef618d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index b909402ef504..6d57bbb98d64 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
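Patch 562 above added a QueryMode/QueryPlan/ResultSetStats profiling surface, patch 566 reverted it, and this patch reintroduces query profiling as ExplainOptions/ExplainMetrics (with PlanSummary and ExecutionStats, per the commit message and the new query_profile.py in the diffstat). A minimal sketch of how the new surface could be exercised once this patch lands follows; the ``analyze`` flag and the exact response field name are assumptions inferred from the commit message, not confirmed by the hunks shown here:

    from google.cloud import datastore_v1
    from google.cloud.datastore_v1.types import query_profile

    client = datastore_v1.DatastoreClient()
    request = datastore_v1.RunQueryRequest(
        project_id="my-project",  # hypothetical project id
        gql_query=datastore_v1.GqlQuery(query_string="SELECT * FROM Task"),
        # ``analyze=True`` (assumed) asks the backend to execute the query and
        # return ExecutionStats alongside the PlanSummary.
        explain_options=query_profile.ExplainOptions(analyze=True),
    )
    response = client.run_query(request=request)
    # Per the commit message, ExplainMetrics replaces the reverted
    # ResultSetStats and is only populated when explain_options is set.
    print(response.explain_metrics)
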
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index bbf2af15324b..be83caf7cb18 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 40cde7575721..b564e8c1a38f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -113,8 +113,12 @@ class DatastoreAdminAsyncClient: _client: DatastoreAdminClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = DatastoreAdminClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DatastoreAdminClient._DEFAULT_UNIVERSE common_billing_account_path = staticmethod( DatastoreAdminClient.common_billing_account_path @@ -219,6 +223,25 @@ def transport(self) -> DatastoreAdminTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(DatastoreAdminClient).get_transport_class, type(DatastoreAdminClient) ) @@ -231,7 +254,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the datastore admin client. + """Instantiates the datastore admin async client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -242,23 +265,38 @@ def __init__( transport (Union[str, ~.DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -432,6 +470,9 @@ async def sample_export_entities(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -611,6 +652,9 @@ async def sample_import_entities(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -721,6 +765,9 @@ async def sample_create_index(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -833,6 +880,9 @@ async def sample_delete_index(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -933,6 +983,9 @@ async def sample_get_index(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request.
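The `OptionalRetry` hunks above widen the alias to admit `None`, matching the three ways callers can pass `retry` to a generated method: `None` disables retries, a concrete policy customizes them, and the `gapic_v1.method.DEFAULT` sentinel keeps the generated defaults. A minimal sketch of that contract (the `fetch` helper is illustrative, not part of this diff):

```python
from google.api_core import gapic_v1
from google.api_core import retry as retries

# Illustrative helper (not from the diff): mirrors how generated methods
# interpret the widened OptionalRetry alias.
def fetch(request, retry=gapic_v1.method.DEFAULT):
    if retry is None:
        return "retries explicitly disabled by the caller"
    if isinstance(retry, retries.Retry):
        return "caller-supplied retry policy"
    return "DEFAULT sentinel: use the generated retry policy"
```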
response = await rpc( request, @@ -1031,6 +1084,9 @@ async def sample_list_indexes(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1094,6 +1150,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1148,6 +1207,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1206,6 +1268,9 @@ async def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1260,6 +1325,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index cadac67edfe1..de174f58dbd3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.datastore_admin_v1 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -183,11 +184,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "datastore.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -316,7 +321,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -346,6 +351,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -379,6 +389,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DatastoreAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DatastoreAdminClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + def __init__( self, *, @@ -398,22 +580,32 @@ def __init__( transport (Union[str, DatastoreAdminTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -424,17 +616,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DatastoreAdminClient._read_environment_variables() + self._client_cert_source = DatastoreAdminClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert ) + self._universe_domain = DatastoreAdminClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -443,20 +652,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, DatastoreAdminTransport): + transport_provided = isinstance(transport, DatastoreAdminTransport) + if transport_provided: # transport is a DatastoreAdminTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(DatastoreAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DatastoreAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -466,17 +688,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def export_entities( @@ -640,6 +862,9 @@ def sample_export_entities(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -818,6 +1043,9 @@ def sample_import_entities(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -929,6 +1157,9 @@ def sample_create_index(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1042,6 +1273,9 @@ def sample_delete_index(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1133,6 +1367,9 @@ def sample_get_index(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1222,6 +1459,9 @@ def sample_list_indexes(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1298,6 +1538,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1352,6 +1595,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1410,6 +1656,9 @@ def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1464,6 +1713,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
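Because the new helpers above are static and side-effect free, their resolution order can be checked directly. A sketch exercising `_read_environment_variables` and `_get_api_endpoint` as defined in this diff (private APIs, used here only to illustrate the behavior; `example.com` is a placeholder universe):

```python
import os
from google.cloud.datastore_admin_v1 import DatastoreAdminClient

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
os.environ["GOOGLE_CLOUD_UNIVERSE_DOMAIN"] = "example.com"

use_cert, use_mtls, universe_env = DatastoreAdminClient._read_environment_variables()
assert (use_cert, use_mtls, universe_env) == (False, "never", "example.com")

# No override and no mTLS: the endpoint comes from the new
# "datastore.{UNIVERSE_DOMAIN}" template.
endpoint = DatastoreAdminClient._get_api_endpoint(None, None, universe_env, use_mtls)
assert endpoint == "datastore.example.com"
# mTLS outside the default universe would instead raise MutualTLSChannelError.
```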
rpc( request, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index 139e1983967f..eb4bd0dcff0a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index d1930caaaf71..f3b7656e10c6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index d37482a19e6e..bddab4904ec7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -63,7 +63,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -126,6 +126,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index f5fd0e906280..68867594195f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -120,7 +120,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 3c6aa35167f8..367a5ab69922 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -165,7 +165,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index b29a8b7591bb..8776e623e810 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -35,9 +35,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin @@ -443,7 +443,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -599,9 +599,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -610,7 +608,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -688,7 +685,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -772,9 +768,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -783,7 +777,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -859,7 +852,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -945,9 +937,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -956,7 +946,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1034,7 +1023,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py index 7ba786ca6db5..ca082a05529e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index 447827271d96..eb838570faea 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
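The rest-transport hunks above drop `including_default_value_fields` from every `json_format.MessageToJson` call, since newer protobuf releases removed that keyword; only `use_integers_for_enums` remains. A sketch of the surviving call shape, using `Index` merely as a convenient message (the `Index.pb(...)` unwrapping is the standard proto-plus step, not something introduced by this diff):

```python
from google.protobuf import json_format
from google.cloud.datastore_admin_v1.types import Index

msg = Index(index_id="idx-123")  # placeholder data
# Unwrap the proto-plus message to its raw protobuf before serializing.
body = json_format.MessageToJson(Index.pb(msg), use_integers_for_enums=True)
print(body)
```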
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index dfb44417e1d6..77a7079de6e8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index 12e933592df4..ec69e94191c4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index b494905bdb04..8c9d09fe7872 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,6 +61,10 @@ from .types.query import PropertyReference from .types.query import Query from .types.query import QueryResultBatch +from .types.query_profile import ExecutionStats +from .types.query_profile import ExplainMetrics +from .types.query_profile import ExplainOptions +from .types.query_profile import PlanSummary __all__ = ( "DatastoreAsyncClient", @@ -78,6 +82,9 @@ "DatastoreClient", "Entity", "EntityResult", + "ExecutionStats", + "ExplainMetrics", + "ExplainOptions", "Filter", "GqlQuery", "GqlQueryParameter", @@ -88,6 +95,7 @@ "Mutation", "MutationResult", "PartitionId", + "PlanSummary", "Projection", "PropertyFilter", "PropertyOrder", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
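With the `query_profile` exports above, the new profiling types are importable straight from `google.cloud.datastore_v1`, and per the commit message earlier in this PR, `RunQueryRequest` gains an `explain_options` field. A hedged sketch of wiring them together (the `analyze` field and its semantics are an assumption about the new type, and `my-project` is a placeholder):

```python
from google.cloud.datastore_v1 import ExplainOptions, RunQueryRequest

# Assumed usage: analyze=True would request execution stats in addition
# to the query plan; the field name is an assumption about ExplainOptions.
request = RunQueryRequest(
    project_id="my-project",  # placeholder project
    explain_options=ExplainOptions(analyze=True),
)
```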
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py index a93070e267de..e992abb33b29 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index c49b4b079319..e911a362b414 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,14 +38,15 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -65,8 +66,12 @@ class DatastoreAsyncClient: _client: DatastoreClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = DatastoreClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DatastoreClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DatastoreClient._DEFAULT_UNIVERSE common_billing_account_path = staticmethod( DatastoreClient.common_billing_account_path @@ -165,6 +170,25 @@ def transport(self) -> DatastoreTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + get_transport_class = functools.partial( type(DatastoreClient).get_transport_class, type(DatastoreClient) ) @@ -177,7 +201,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the datastore client. + """Instantiates the datastore async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -188,23 +212,38 @@ def __init__( transport (Union[str, ~.DatastoreTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -337,6 +376,9 @@ async def sample_lookup(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -429,6 +471,9 @@ async def sample_run_query(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -521,6 +566,9 @@ async def sample_run_aggregation_query(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -625,6 +673,9 @@ async def sample_begin_transaction(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -775,6 +826,9 @@ async def sample_commit(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -892,6 +946,9 @@ async def sample_rollback(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1009,6 +1066,9 @@ async def sample_allocate_ids(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1135,6 +1195,9 @@ async def sample_reserve_ids(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1189,6 +1252,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1243,6 +1309,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1301,6 +1370,9 @@ async def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1355,6 +1427,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 58c8ad221d85..0a4981751ffc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
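Note the recurring pattern above: every RPC now calls `_validate_universe_domain()` just before sending, so a mismatch between the configured universe and the credentials' universe surfaces at the first request rather than at construction time. A sketch of that failure mode, assuming a google-auth version whose base credentials expose `universe_domain` (default `googleapis.com`) and a google-api-core version whose `ClientOptions` accepts `universe_domain`:

```python
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.datastore_v1 import DatastoreClient

client = DatastoreClient(
    credentials=AnonymousCredentials(),  # universe_domain == "googleapis.com"
    client_options=ClientOptions(universe_domain="example.com"),  # placeholder
)
# Construction succeeds; the mismatch only surfaces when an RPC validates:
try:
    client._validate_universe_domain()  # private; normally called per-RPC
except ValueError as exc:
    print(exc)  # "...does not match the universe domain found in the credentials..."
```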
@@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.datastore_v1 import gapic_version as package_version @@ -42,14 +43,15 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO @@ -133,11 +135,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "datastore.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -266,7 +272,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -296,6 +302,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -329,6 +340,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DatastoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DatastoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = DatastoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DatastoreClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DatastoreClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -348,22 +531,32 @@ def __init__( transport (Union[str, DatastoreTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
                 "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-            (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+            2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                 is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
+                to provide a client certificate for mTLS transport. If
                 not provided, the default SSL client certificate will be used if
                 present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                 set, no client certificate will be used.
+
+            3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
             client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                 The client info used to send a user-agent string along with
                 API requests. If ``None``, then default info will be used.
@@ -374,17 +567,34 @@ def __init__(
             google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                 creation failed for any reason.
         """
-        if isinstance(client_options, dict):
-            client_options = client_options_lib.from_dict(client_options)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        client_options = cast(client_options_lib.ClientOptions, client_options)
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(
+            client_options_lib.ClientOptions, self._client_options
+        )
+
+        universe_domain_opt = getattr(self._client_options, "universe_domain", None)

-        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
-            client_options
+        (
+            self._use_client_cert,
+            self._use_mtls_endpoint,
+            self._universe_domain_env,
+        ) = DatastoreClient._read_environment_variables()
+        self._client_cert_source = DatastoreClient._get_client_cert_source(
+            self._client_options.client_cert_source, self._use_client_cert
+        )
+        self._universe_domain = DatastoreClient._get_universe_domain(
+            universe_domain_opt, self._universe_domain_env
         )
+        self._api_endpoint = None  # updated below, depending on `transport`

-        api_key_value = getattr(client_options, "api_key", None)
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
         if api_key_value and credentials:
             raise ValueError(
                 "client_options.api_key and credentials are mutually exclusive"
@@ -393,20 +603,30 @@ def __init__(
         # Save or instantiate the transport.
         # Ordinarily, we provide the transport, but allowing a custom transport
         # instance provides an extensibility point for unusual situations.
- if isinstance(transport, DatastoreTransport): + transport_provided = isinstance(transport, DatastoreTransport) + if transport_provided: # transport is a DatastoreTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(DatastoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or DatastoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -416,17 +636,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def lookup( @@ -545,6 +765,9 @@ def sample_lookup(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -633,6 +856,9 @@ def sample_run_query(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -721,6 +947,9 @@ def sample_run_aggregation_query(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -830,6 +1059,9 @@ def sample_begin_transaction(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -985,6 +1217,9 @@ def sample_commit(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1107,6 +1342,9 @@ def sample_rollback(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1229,6 +1467,9 @@ def sample_allocate_ids(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1350,6 +1591,9 @@ def sample_reserve_ids(): gapic_v1.routing_header.to_grpc_metadata(header_params), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1417,6 +1661,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1471,6 +1718,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1529,6 +1779,9 @@ def delete_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1583,6 +1836,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 911cdd46a9cc..727e271c7369 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index e97e6b6b8529..3c31a4a7dbf8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -124,6 +124,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 3e31e98e69d1..ebc16b2189ae 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -71,7 +71,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 2a3a66b0fe95..7b3997ddd336 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -116,7 +116,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index aa5b0d1e3585..245516187538 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,9 +34,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.datastore_v1.types import datastore @@ -443,7 +443,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'datastore.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -555,9 +555,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -566,7 +564,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -655,9 +652,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -666,7 +661,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -753,9 +747,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -764,7 +756,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -851,9 +842,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -862,7 +851,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -949,9 +937,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -960,7 +946,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1048,9 +1033,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1059,7 +1042,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1148,9 +1130,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1159,7 
+1139,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1246,9 +1225,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1257,7 +1234,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index b6ff2a444e92..6aa3d84692ce 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,6 +61,12 @@ Query, QueryResultBatch, ) +from .query_profile import ( + ExecutionStats, + ExplainMetrics, + ExplainOptions, + PlanSummary, +) __all__ = ( "AggregationResult", @@ -103,4 +109,8 @@ "PropertyReference", "Query", "QueryResultBatch", + "ExecutionStats", + "ExplainMetrics", + "ExplainOptions", + "PlanSummary", ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index dd53cfa3d4de..b35ca1f90538 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 6c768904ef28..ccea0458cbfd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,6 +22,7 @@ from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.cloud.datastore_v1.types import query_profile from google.protobuf import timestamp_pb2 # type: ignore @@ -185,6 +186,11 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. + explain_options (google.cloud.datastore_v1.types.ExplainOptions): + Optional. Explain options for the query. If + set, additional query statistics will be + returned. 
If not, only query results will be + returned. """ project_id: str = proto.Field( @@ -217,6 +223,11 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + explain_options: query_profile.ExplainOptions = proto.Field( + proto.MESSAGE, + number=12, + message=query_profile.ExplainOptions, + ) class RunQueryResponse(proto.Message): @@ -237,6 +248,11 @@ class RunQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. + explain_metrics (google.cloud.datastore_v1.types.ExplainMetrics): + Query explain metrics. This is only present when the + [RunQueryRequest.explain_options][google.datastore.v1.RunQueryRequest.explain_options] + is provided, and it is sent only once with the last response + in the stream. """ batch: gd_query.QueryResultBatch = proto.Field( @@ -253,6 +269,11 @@ class RunQueryResponse(proto.Message): proto.BYTES, number=5, ) + explain_metrics: query_profile.ExplainMetrics = proto.Field( + proto.MESSAGE, + number=9, + message=query_profile.ExplainMetrics, + ) class RunAggregationQueryRequest(proto.Message): @@ -292,6 +313,11 @@ class RunAggregationQueryRequest(proto.Message): aggregation query. This field is a member of `oneof`_ ``query_type``. + explain_options (google.cloud.datastore_v1.types.ExplainOptions): + Optional. Explain options for the query. If + set, additional query statistics will be + returned. If not, only query results will be + returned. """ project_id: str = proto.Field( @@ -324,6 +350,11 @@ class RunAggregationQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + explain_options: query_profile.ExplainOptions = proto.Field( + proto.MESSAGE, + number=11, + message=query_profile.ExplainOptions, + ) class RunAggregationQueryResponse(proto.Message): @@ -345,6 +376,11 @@ class RunAggregationQueryResponse(proto.Message): [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] was set in [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. + explain_metrics (google.cloud.datastore_v1.types.ExplainMetrics): + Query explain metrics. This is only present when the + [RunAggregationQueryRequest.explain_options][google.datastore.v1.RunAggregationQueryRequest.explain_options] + is provided, and it is sent only once with the last response + in the stream. """ batch: aggregation_result.AggregationResultBatch = proto.Field( @@ -361,6 +397,11 @@ class RunAggregationQueryResponse(proto.Message): proto.BYTES, number=5, ) + explain_metrics: query_profile.ExplainMetrics = proto.Field( + proto.MESSAGE, + number=9, + message=query_profile.ExplainMetrics, + ) class BeginTransactionRequest(proto.Message): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 09c0ecc809c3..5c5bcdc403f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
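The explain_options/explain_metrics fields added to datastore.py above are the request/response surface of query profiling. A minimal usage sketch, not part of this patch: it assumes these types are re-exported at the datastore_v1 package level (as the types/__init__.py hunk suggests), and "my-project" and "Task" are placeholders.

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()
    request = datastore_v1.RunQueryRequest(
        project_id="my-project",  # placeholder project
        query=datastore_v1.Query(
            kind=[datastore_v1.KindExpression(name="Task")]  # placeholder kind
        ),
        # analyze=True plans *and* executes the query, so execution stats are
        # returned along with the plan; analyze=False would plan only.
        explain_options=datastore_v1.ExplainOptions(analyze=True),
    )
    response = client.run_query(request=request)

    # Per the field docs above, explain_metrics is only populated when
    # explain_options was set on the request.
    print(response.explain_metrics.plan_summary.indexes_used)
    print(response.explain_metrics.execution_stats.results_returned)
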
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 0ddd68111db4..2ce1000fe956 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py new file mode 100644 index 000000000000..8dca0f6e7498 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.v1", + manifest={ + "ExplainOptions", + "ExplainMetrics", + "PlanSummary", + "ExecutionStats", + }, +) + + +class ExplainOptions(proto.Message): + r"""Explain options for the query. + + Attributes: + analyze (bool): + Optional. Whether to execute this query. + + When false (the default), the query will be + planned, returning only metrics from the + planning stages. + + When true, the query will be planned and + executed, returning the full query results along + with both planning and execution stage metrics. + """ + + analyze: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ExplainMetrics(proto.Message): + r"""Explain metrics for the query. + + Attributes: + plan_summary (google.cloud.datastore_v1.types.PlanSummary): + Planning phase information for the query. + execution_stats (google.cloud.datastore_v1.types.ExecutionStats): + Aggregated stats from the execution of the query. Only + present when + [ExplainOptions.analyze][google.datastore.v1.ExplainOptions.analyze] + is set to true. + """ + + plan_summary: "PlanSummary" = proto.Field( + proto.MESSAGE, + number=1, + message="PlanSummary", + ) + execution_stats: "ExecutionStats" = proto.Field( + proto.MESSAGE, + number=2, + message="ExecutionStats", + ) + + +class PlanSummary(proto.Message): + r"""Planning phase information for the query. + + Attributes: + indexes_used (MutableSequence[google.protobuf.struct_pb2.Struct]): + The indexes selected for the query. 
For example: [ + {"query_scope": "Collection", "properties": "(foo ASC, + **name** ASC)"}, {"query_scope": "Collection", "properties": + "(bar ASC, **name** ASC)"} ] + """ + + indexes_used: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class ExecutionStats(proto.Message): + r"""Execution statistics for the query. + + Attributes: + results_returned (int): + Total number of results returned, including + documents, projections, aggregation results, + keys. + execution_duration (google.protobuf.duration_pb2.Duration): + Total time to execute the query in the + backend. + read_operations (int): + Total billable read operations. + debug_stats (google.protobuf.struct_pb2.Struct): + Debugging statistics from the execution of the query. Note + that the debugging stats are subject to change as Firestore + evolves. It could include: { "indexes_entries_scanned": + "1000", "documents_scanned": "20", "billing_details" : { + "documents_billable": "20", "index_entries_billable": + "1000", "min_query_cost": "0" } } + """ + + results_returned: int = proto.Field( + proto.INT64, + number=1, + ) + execution_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + read_operations: int = proto.Field( + proto.INT64, + number=4, + ) + debug_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py index 44ae7ecb85c3..2f999e1e5a8f 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 4a8be1c96b8b..f0406904978a 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
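One note on the query_profile module introduced above before the keyword-mapping hunk below: analyze is its only option, and it gates whether execution statistics accompany the plan summary. A small illustrative sketch, not taken from the patch:

    from google.cloud.datastore_v1.types import query_profile

    # Default (analyze=False): the backend only plans the query, so the
    # resulting ExplainMetrics carries plan_summary but no execution_stats.
    plan_only = query_profile.ExplainOptions()

    # analyze=True: the query is also executed, so execution_stats
    # (results_returned, execution_duration, read_operations, debug_stats)
    # is populated alongside plan_summary.
    with_stats = query_profile.ExplainOptions(analyze=True)

    assert plan_only.analyze is False
    assert with_stats.analyze is True
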
@@ -45,8 +45,8 @@ class datastoreCallTransformer(cst.CSTTransformer): 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', ), + 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'explain_options', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'explain_options', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 07827e71a172..5f588e054fc0 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -30,6 +30,9 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index 4b49551c56a0..da9c6ca856ac 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -6,6 +6,7 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 diff --git a/packages/google-cloud-datastore/testing/constraints-3.8.txt b/packages/google-cloud-datastore/testing/constraints-3.8.txt index e69de29bb2d1..932ece692a79 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.8.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.8.txt @@ -0,0 +1 @@ +google-api-core==2.14.0 diff --git a/packages/google-cloud-datastore/tests/__init__.py b/packages/google-cloud-datastore/tests/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/tests/__init__.py +++ b/packages/google-cloud-datastore/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/tests/unit/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
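The keyword-mapping change above teaches the fixup script that run_query and run_aggregation_query accept explain_options. A sketch of the migration it enables; the CLI flags and the rewritten request=dict(...) shape follow the usual generated fixup scripts and are assumptions, not taken from this patch:

    from google.cloud import datastore_v1

    client = datastore_v1.DatastoreClient()
    opts = datastore_v1.ExplainOptions(analyze=False)

    # Old-style flattened keywords, as the script expects to find them:
    #     client.run_query(project_id="my-project", explain_options=opts)
    #
    # After running, for example:
    #     python scripts/fixup_datastore_v1_keywords.py \
    #         --input-dir src/ --output-dir src-fixed/
    # the call site is rewritten to the request-object form:
    response = client.run_query(
        request=dict(project_id="my-project", explain_options=opts)
    )
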
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index eb8b8a4f44b5..40adb3a4f549 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -79,6 +80,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -108,6 +120,274 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert DatastoreAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DatastoreAdminClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DatastoreAdminClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreAdminClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DatastoreAdminClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DatastoreAdminClient._get_client_cert_source(None, False) is None + assert ( + DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DatastoreAdminClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DatastoreAdminClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DatastoreAdminClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = 
DatastoreAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DatastoreAdminClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DatastoreAdminClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "always") + == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreAdminClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreAdminClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DatastoreAdminClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DatastoreAdminClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DatastoreAdminClient._get_universe_domain(None, None) + == DatastoreAdminClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DatastoreAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -219,13 +499,13 @@ def test_datastore_admin_client_get_transport_class(): ) @mock.patch.object( DatastoreAdminClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), ) @mock.patch.object( DatastoreAdminAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), ) def test_datastore_admin_client_client_options( client_class, transport_class, transport_name @@ -267,7 +547,9 @@ def test_datastore_admin_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -297,15 +579,23 @@ def test_datastore_admin_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -315,7 +605,9 @@ def test_datastore_admin_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -333,7 +625,9 @@ def test_datastore_admin_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -366,13 +660,13 @@ def test_datastore_admin_client_client_options( ) @mock.patch.object( DatastoreAdminClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), ) @mock.patch.object( DatastoreAdminAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAdminAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_datastore_admin_client_mtls_env_auto( @@ -395,7 +689,9 @@ def test_datastore_admin_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -427,7 +723,9 @@ def test_datastore_admin_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -461,7 +759,9 @@ def test_datastore_admin_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -551,6 +851,115 @@ def test_datastore_admin_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient] +) +@mock.patch.object( + DatastoreAdminClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAdminAsyncClient), +) +def test_datastore_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -577,7 +986,9 @@ def test_datastore_admin_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -617,7 +1028,9 @@ def test_datastore_admin_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -677,7 +1090,9 @@ def test_datastore_admin_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1838,7 +2253,7 @@ async def test_list_indexes_field_headers_async(): def test_list_indexes_pager(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1888,7 +2303,7 @@ def test_list_indexes_pager(transport_name: str = "grpc"): def test_list_indexes_pages(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1930,7 +2345,7 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
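The pager-test hunks above and below replace ga_credentials.AnonymousCredentials (the class object) with AnonymousCredentials() (an instance). A standalone sketch of the distinction being fixed, not part of the patch:

    from google.auth import credentials as ga_credentials

    creds = ga_credentials.AnonymousCredentials()  # a Credentials instance
    not_creds = ga_credentials.AnonymousCredentials  # the class itself

    assert isinstance(creds, ga_credentials.Credentials)
    # The bare class slips through loosely typed test helpers, but it is
    # not actually a Credentials object:
    assert not isinstance(not_creds, ga_credentials.Credentials)
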
@@ -1980,7 +2395,7 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2072,11 +2487,7 @@ def test_export_entities_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2352,11 +2763,7 @@ def test_import_entities_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3260,7 +3667,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = DatastoreAdminClient( @@ -4841,7 +5248,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 52d8de52a8e5..8d427d56e118 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
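For context on the repeated removal of including_default_value_fields in the REST transport and test hunks above: that keyword was dropped from json_format.MessageToJson in newer protobuf releases (an assumption about the motivation; the patch does not state it), leaving only use_integers_for_enums. A minimal sketch of the surviving serialization pattern:

    from google.protobuf import json_format
    from google.cloud.datastore_v1.types import datastore

    request = datastore.BeginTransactionRequest(project_id="my-project")  # placeholder
    pb_request = datastore.BeginTransactionRequest.pb(request)

    # Fields left at their defaults are simply omitted from the JSON body,
    # which matches what the REST transport sends after this change.
    body = json_format.MessageToJson(pb_request, use_integers_for_enums=True)
    print(body)
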
@@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -51,6 +52,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.cloud.datastore_v1.types import query_profile from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import struct_pb2 # type: ignore @@ -75,6 +77,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -99,6 +112,254 @@ def test__get_default_mtls_endpoint(): assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + assert DatastoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DatastoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DatastoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DatastoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DatastoreClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DatastoreClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DatastoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DatastoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DatastoreClient._get_client_cert_source(None, False) is None + assert ( + DatastoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DatastoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source 
+ ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DatastoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DatastoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), +) +@mock.patch.object( + DatastoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DatastoreClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DatastoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DatastoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DatastoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, default_universe, "always") + == DatastoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DatastoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DatastoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DatastoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DatastoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DatastoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DatastoreClient._get_universe_domain(None, None) + == DatastoreClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DatastoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + (DatastoreClient, transports.DatastoreRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
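+ # (No fresh comparison should be needed here; the result of the first
+ # validation should be reused.)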
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -209,12 +470,14 @@ def test_datastore_client_get_transport_class(): ], ) @mock.patch.object( - DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), ) @mock.patch.object( DatastoreAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), ) def test_datastore_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. @@ -254,7 +517,9 @@ def test_datastore_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -284,15 +549,23 @@ def test_datastore_client_client_options(client_class, transport_class, transpor # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -302,7 +575,9 @@ def test_datastore_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -320,7 +595,9 @@ def test_datastore_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -352,12 +629,14 @@ def test_datastore_client_client_options(client_class, transport_class, transpor ], ) @mock.patch.object( - DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), ) @mock.patch.object( DatastoreAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(DatastoreAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_datastore_client_mtls_env_auto( @@ -380,7 +659,9 @@ def test_datastore_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -412,7 +693,9 @@ def test_datastore_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -446,7 +729,9 @@ def test_datastore_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -532,6 +817,113 @@ def test_datastore_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
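+ # The message is asserted verbatim below, so any drift in the client's
+ # error wording surfaces as a test failure.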
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient]) +@mock.patch.object( + DatastoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreClient), +) +@mock.patch.object( + DatastoreAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DatastoreAsyncClient), +) +def test_datastore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DatastoreClient._DEFAULT_UNIVERSE + default_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
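+ # (Older client_options releases lack the `universe_domain` attribute;
+ # the hasattr() guard below keeps this block portable across versions.)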
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -558,7 +950,9 @@ def test_datastore_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -593,7 +987,9 @@ def test_datastore_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +1042,9 @@ def test_datastore_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -2477,11 +2875,7 @@ def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2765,11 +3159,7 @@ def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2976,11 +3366,7 @@ def test_run_aggregation_query_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, 
use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3189,11 +3575,7 @@ def test_begin_transaction_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3456,11 +3838,7 @@ def test_commit_rest_required_fields(request_type=datastore.CommitRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -3743,11 +4121,7 @@ def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4017,11 +4391,7 @@ def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsReq request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4296,11 +4666,7 @@ def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsReque request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4564,7 +4930,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = DatastoreClient( @@ -6086,7 +6452,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From 9b05a59597d37855119cccd92fd3bbf5cedf0fb3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 04:20:33 -0400 Subject: [PATCH 571/611] chore(python): update dependencies in /.kokoro (#530) Source-Link: https://github.com/googleapis/synthtool/commit/db94845da69ccdfefd7ce55c84e6cfa74829747e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../google-cloud-datastore/.kokoro/build.sh | 7 -- .../.kokoro/docker/docs/Dockerfile | 4 + .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 38 ++++++ .../.kokoro/requirements.in | 3 +- .../.kokoro/requirements.txt | 114 ++++++++---------- 7 files changed, 99 insertions(+), 72 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in create mode 100644 packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index e4e943e0259a..4bdeef3904e2 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 +# created: 2024-03-15T16:25:47.905264637Z diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index 3795bde04965..f9800c921fa5 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index 8e39a2cc438d..bdaf39fe22d0 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000000..816817c672a1 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000000..0e5d70f20f83 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox diff --git a/packages/google-cloud-datastore/.kokoro/requirements.in b/packages/google-cloud-datastore/.kokoro/requirements.in index ec867d9fd65a..fff4d9ce0d0a 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.in +++ b/packages/google-cloud-datastore/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata 
typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index bda8e38c4f31..dd61f5f32018 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 
\ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ 
- --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in From 31758232891128a1901ac60caebe13d0c3ff0cf7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 07:06:47 -0400 Subject: [PATCH 572/611] chore: Update gapic-generator-python to v1.16.1 (#532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update 
gapic-generator-python to v1.16.1 PiperOrigin-RevId: 618243632 Source-Link: https://github.com/googleapis/googleapis/commit/078a38bd240827be8e69a5b62993380d1b047994 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7af768c3f8ce58994482350f7401173329950a31 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2FmNzY4YzNmOGNlNTg5OTQ0ODIzNTBmNzQwMTE3MzMyOTk1MGEzMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../test_datastore_admin.py | 332 ++++++++++++- .../unit/gapic/datastore_v1/test_datastore.py | 450 +++++++++++++++++- 2 files changed, 754 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 40adb3a4f549..c08b309ad23f 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1159,7 +1159,8 @@ def test_export_entities(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() + request = datastore_admin.ExportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1181,6 +1182,54 @@ def test_export_entities_empty_call(): assert args[0] == datastore_admin.ExportEntitiesRequest() +def test_export_entities_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + client.export_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + +@pytest.mark.asyncio +async def test_export_entities_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + # Designate an appropriate return value for the call. 
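+ # FakeUnaryUnaryCall wraps the operation in an awaitable, so the async
+ # client can await the mocked RPC exactly as it would a live call.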
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ExportEntitiesRequest() + + @pytest.mark.asyncio async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest @@ -1205,7 +1254,8 @@ async def test_export_entities_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() + request = datastore_admin.ExportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1415,7 +1465,8 @@ def test_import_entities(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() + request = datastore_admin.ImportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1437,6 +1488,54 @@ def test_import_entities_empty_call(): assert args[0] == datastore_admin.ImportEntitiesRequest() +def test_import_entities_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + client.import_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + +@pytest.mark.asyncio +async def test_import_entities_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ImportEntitiesRequest() + + @pytest.mark.asyncio async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest @@ -1461,7 +1560,8 @@ async def test_import_entities_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() + request = datastore_admin.ImportEntitiesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1671,7 +1771,8 @@ def test_create_index(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.CreateIndexRequest() + request = datastore_admin.CreateIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1693,6 +1794,52 @@ def test_create_index_empty_call(): assert args[0] == datastore_admin.CreateIndexRequest() +def test_create_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.CreateIndexRequest( + project_id="project_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + client.create_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest( + project_id="project_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.CreateIndexRequest() + + @pytest.mark.asyncio async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest @@ -1717,7 +1864,8 @@ async def test_create_index_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.CreateIndexRequest() + request = datastore_admin.CreateIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1815,7 +1963,8 @@ def test_delete_index(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.DeleteIndexRequest() + request = datastore_admin.DeleteIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1837,6 +1986,54 @@ def test_delete_index_empty_call(): assert args[0] == datastore_admin.DeleteIndexRequest() +def test_delete_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.DeleteIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + client.delete_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + +@pytest.mark.asyncio +async def test_delete_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.DeleteIndexRequest() + + @pytest.mark.asyncio async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest @@ -1861,7 +2058,8 @@ async def test_delete_index_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.DeleteIndexRequest() + request = datastore_admin.DeleteIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1967,7 +2165,8 @@ def test_get_index(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() + request = datastore_admin.GetIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, index.Index) @@ -1994,6 +2193,60 @@ def test_get_index_empty_call(): assert args[0] == datastore_admin.GetIndexRequest() +def test_get_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
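+ # (Only fields that qualify under AIP-4235 are auto-filled with a UUID4;
+ # the explicit values set below must pass through unchanged.)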
+ client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.GetIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + client.get_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.GetIndexRequest( + project_id="project_id_value", + index_id="index_id_value", + ) + + +@pytest.mark.asyncio +async def test_get_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + ) + response = await client.get_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.GetIndexRequest() + + @pytest.mark.asyncio async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest @@ -2024,7 +2277,8 @@ async def test_get_index_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() + request = datastore_admin.GetIndexRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, index.Index) @@ -2129,7 +2383,8 @@ def test_list_indexes(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() + request = datastore_admin.ListIndexesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesPager) @@ -2152,6 +2407,58 @@ def test_list_indexes_empty_call(): assert args[0] == datastore_admin.ListIndexesRequest() +def test_list_indexes_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore_admin.ListIndexesRequest( + project_id="project_id_value", + filter="filter_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + client.list_indexes(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ListIndexesRequest( + project_id="project_id_value", + filter="filter_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_indexes_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_indexes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore_admin.ListIndexesRequest() + + @pytest.mark.asyncio async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest @@ -2178,7 +2485,8 @@ async def test_list_indexes_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() + request = datastore_admin.ListIndexesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesAsyncPager) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 8d427d56e118..203d9c3af7f3 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1113,7 +1113,8 @@ def test_lookup(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() + request = datastore.LookupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) @@ -1136,6 +1137,56 @@ def test_lookup_empty_call(): assert args[0] == datastore.LookupRequest() +def test_lookup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.LookupRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
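+ # The explicitly populated fields must survive the round trip unchanged;
+ # any UUID4-annotated fields would be auto-filled by the client.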
+ with mock.patch.object(type(client.transport.lookup), "__call__") as call: + client.lookup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.LookupRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_lookup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.lookup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.LookupRequest() + + @pytest.mark.asyncio async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest @@ -1162,7 +1213,8 @@ async def test_lookup_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() + request = datastore.LookupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.LookupResponse) @@ -1377,7 +1429,8 @@ def test_run_query(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() + request = datastore.RunQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) @@ -1400,6 +1453,56 @@ def test_run_query_empty_call(): assert args[0] == datastore.RunQueryRequest() +def test_run_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.RunQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_run_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.run_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunQueryRequest() + + @pytest.mark.asyncio async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest @@ -1426,7 +1529,8 @@ async def test_run_query_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() + request = datastore.RunQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunQueryResponse) @@ -1509,7 +1613,8 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() + request = datastore.RunAggregationQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunAggregationQueryResponse) @@ -1534,6 +1639,60 @@ def test_run_aggregation_query_empty_call(): assert args[0] == datastore.RunAggregationQueryRequest() +def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.RunAggregationQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_run_aggregation_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
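+ # FakeUnaryUnaryCall wraps the canned response in an awaitable call object,
+ # mimicking a real grpc.aio unary-unary RPC.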
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.run_aggregation_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RunAggregationQueryRequest() + + @pytest.mark.asyncio async def test_run_aggregation_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest @@ -1562,7 +1721,8 @@ async def test_run_aggregation_query_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() + request = datastore.RunAggregationQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RunAggregationQueryResponse) @@ -1649,7 +1809,8 @@ def test_begin_transaction(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() + request = datastore.BeginTransactionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.BeginTransactionResponse) @@ -1674,6 +1835,60 @@ def test_begin_transaction_empty_call(): assert args[0] == datastore.BeginTransactionRequest() +def test_begin_transaction_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.BeginTransactionRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.BeginTransactionRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + ) + response = await client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.BeginTransactionRequest() + + @pytest.mark.asyncio async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest @@ -1702,7 +1917,8 @@ async def test_begin_transaction_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() + request = datastore.BeginTransactionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.BeginTransactionResponse) @@ -1873,7 +2089,8 @@ def test_commit(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() + request = datastore.CommitRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.CommitResponse) @@ -1896,6 +2113,56 @@ def test_commit_empty_call(): assert args[0] == datastore.CommitRequest() +def test_commit_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.CommitRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.CommitRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_commit_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse( + index_updates=1389, + ) + ) + response = await client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.CommitRequest() + + @pytest.mark.asyncio async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest @@ -1922,7 +2189,8 @@ async def test_commit_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() + request = datastore.CommitRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.CommitResponse) @@ -2165,7 +2433,8 @@ def test_rollback(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() + request = datastore.RollbackRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RollbackResponse) @@ -2187,6 +2456,54 @@ def test_rollback_empty_call(): assert args[0] == datastore.RollbackRequest() +def test_rollback_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.RollbackRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RollbackRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_rollback_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + response = await client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.RollbackRequest() + + @pytest.mark.asyncio async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest @@ -2211,7 +2528,8 @@ async def test_rollback_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() + request = datastore.RollbackRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.RollbackResponse) @@ -2381,7 +2699,8 @@ def test_allocate_ids(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() + request = datastore.AllocateIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, datastore.AllocateIdsResponse) @@ -2403,6 +2722,54 @@ def test_allocate_ids_empty_call(): assert args[0] == datastore.AllocateIdsRequest() +def test_allocate_ids_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datastore.AllocateIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + client.allocate_ids(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.AllocateIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_allocate_ids_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + response = await client.allocate_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.AllocateIdsRequest() + + @pytest.mark.asyncio async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest @@ -2427,7 +2794,8 @@ async def test_allocate_ids_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() + request = datastore.AllocateIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.AllocateIdsResponse) @@ -2617,7 +2985,8 @@ def test_reserve_ids(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() + request = datastore.ReserveIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.ReserveIdsResponse) @@ -2639,6 +3008,54 @@ def test_reserve_ids_empty_call(): assert args[0] == datastore.ReserveIdsRequest() +def test_reserve_ids_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = datastore.ReserveIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + client.reserve_ids(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.ReserveIdsRequest( + project_id="project_id_value", + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_reserve_ids_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + response = await client.reserve_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datastore.ReserveIdsRequest() + + @pytest.mark.asyncio async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest @@ -2663,7 +3080,8 @@ async def test_reserve_ids_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() + request = datastore.ReserveIdsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, datastore.ReserveIdsResponse) From 1bf801a008c4d24475fd8e9bd1b33226f6be8c6b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 4 Apr 2024 19:42:28 +0000 Subject: [PATCH 573/611] feat: add new_transaction support (#499) --- .../google/cloud/datastore/aggregation.py | 10 +- .../google/cloud/datastore/batch.py | 27 +++- .../google/cloud/datastore/client.py | 26 +++- .../google/cloud/datastore/helpers.py | 72 ++++++--- .../google/cloud/datastore/query.py | 11 +- .../google/cloud/datastore/transaction.py | 60 +++++++- .../tests/system/test_transaction.py | 51 +++++++ .../tests/unit/test_aggregation.py | 55 ++++++- .../tests/unit/test_client.py | 51 ++++++- .../tests/unit/test_helpers.py | 85 +++++++++++ .../tests/unit/test_query.py | 47 +++++- .../tests/unit/test_transaction.py | 144 ++++++++++++++++++ 12 files changed, 580 insertions(+), 59 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py index 47ebfebd09e3..1384f33206b9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py @@ -442,13 +442,11 @@ def _next_page(self): return None query_pb = self._build_protobuf() - transaction = self.client.current_transaction - if transaction is None: - transaction_id = None - else: - transaction_id = transaction.id + transaction_id, new_transaction_options = helpers.get_transaction_options( + self.client.current_transaction + ) read_options = helpers.get_read_options( - self._eventual, transaction_id, self._read_time + self._eventual, transaction_id, self._read_time, new_transaction_options ) partition_id = entity_pb2.PartitionId( diff 
--git a/packages/google-cloud-datastore/google/cloud/datastore/batch.py b/packages/google-cloud-datastore/google/cloud/datastore/batch.py index e0dbf26dc5ff..69100bc6350e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/batch.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/batch.py @@ -192,6 +192,19 @@ def mutations(self): """ return self._mutations + def _allow_mutations(self) -> bool: + """ + This method is called to see if the batch is in a proper state to allow + `put` and `delete` operations. + + the Transaction subclass overrides this method to support + the `begin_later` flag. + + :rtype: bool + :returns: True if the batch is in a state to allow mutations. + """ + return self._status == self._IN_PROGRESS + def put(self, entity): """Remember an entity's state to be saved during :meth:`commit`. @@ -218,7 +231,7 @@ def put(self, entity): progress, if entity has no key assigned, or if the key's ``project`` does not match ours. """ - if self._status != self._IN_PROGRESS: + if not self._allow_mutations(): raise ValueError("Batch must be in progress to put()") if entity.key is None: @@ -248,7 +261,7 @@ def delete(self, key): progress, if key is not complete, or if the key's ``project`` does not match ours. """ - if self._status != self._IN_PROGRESS: + if not self._allow_mutations(): raise ValueError("Batch must be in progress to delete()") if key.is_partial: @@ -370,10 +383,12 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): try: - if exc_type is None: - self.commit() - else: - self.rollback() + # commit or rollback if not in terminal state + if self._status not in (self._ABORTED, self._FINISHED): + if exc_type is None: + self.commit() + else: + self.rollback() finally: self._client._pop_batch() diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index 3f5041d622f5..b1e79d91a818 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -122,7 +122,7 @@ def _extended_lookup( missing=None, deferred=None, eventual=False, - transaction_id=None, + transaction=None, retry=None, timeout=None, read_time=None, @@ -158,10 +158,10 @@ def _extended_lookup( consistency. If True, request ``EVENTUAL`` read consistency. - :type transaction_id: str - :param transaction_id: If passed, make the request in the scope of - the given transaction. Incompatible with - ``eventual==True`` or ``read_time``. + :type transaction: Transaction + :param transaction: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True`` or ``read_time``. :type retry: :class:`google.api_core.retry.Retry` :param retry: @@ -177,7 +177,7 @@ def _extended_lookup( :type read_time: datetime :param read_time: (Optional) Read time to use for read consistency. Incompatible with - ``eventual==True`` or ``transaction_id``. + ``eventual==True`` or ``transaction``. This feature is in private preview. :type database: str @@ -199,8 +199,14 @@ def _extended_lookup( results = [] + transaction_id = None + transaction_id, new_transaction_options = helpers.get_transaction_options( + transaction + ) + read_options = helpers.get_read_options( + eventual, transaction_id, read_time, new_transaction_options + ) loop_num = 0 - read_options = helpers.get_read_options(eventual, transaction_id, read_time) while loop_num < _MAX_LOOPS: # loop against possible deferred. 
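# A lookup response may report some keys as deferred when the backend cannot
# return them in one batch; each pass re-requests only those keys, bounded
# by _MAX_LOOPS.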
loop_num += 1 request = { @@ -214,6 +220,10 @@ def _extended_lookup( **kwargs, ) + # set new transaction id if we just started a transaction + if transaction and lookup_response.transaction: + transaction._begin_with_id(lookup_response.transaction) + # Accumulate the new results. results.extend(result.entity for result in lookup_response.found) @@ -570,7 +580,7 @@ def get_multi( eventual=eventual, missing=missing, deferred=deferred, - transaction_id=transaction and transaction.id, + transaction=transaction, retry=retry, timeout=timeout, read_time=read_time, diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index e889488337b1..6eaa3b89e37d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -230,7 +230,9 @@ def entity_to_protobuf(entity): return entity_pb -def get_read_options(eventual, transaction_id, read_time=None): +def get_read_options( + eventual, transaction_id, read_time=None, new_transaction_options=None +): """Validate rules for read options, and assign to the request. Helper method for ``lookup()`` and ``run_query``. @@ -245,33 +247,55 @@ def get_read_options(eventual, transaction_id, read_time=None): :type read_time: datetime :param read_time: Read data from the specified time (may be null). This feature is in private preview. + :type new_transaction_options: :class:`google.cloud.datastore_v1.types.TransactionOptions` + :param new_transaction_options: Options for a new transaction. + :rtype: :class:`.datastore_pb2.ReadOptions` :returns: The read options corresponding to the inputs. :raises: :class:`ValueError` if more than one of ``eventual==True``, - ``transaction``, and ``read_time`` is specified. + ``transaction_id``, ``read_time``, and ``new_transaction_options`` is specified. """ - if transaction_id is None: - if eventual: - if read_time is not None: - raise ValueError("eventual must be False when read_time is specified") - else: - return datastore_pb2.ReadOptions( - read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL - ) - else: - if read_time is None: - return datastore_pb2.ReadOptions() - else: - read_time_pb = timestamp_pb2.Timestamp() - read_time_pb.FromDatetime(read_time) - return datastore_pb2.ReadOptions(read_time=read_time_pb) - else: - if eventual: - raise ValueError("eventual must be False when in a transaction") - elif read_time is not None: - raise ValueError("transaction and read_time are mutual exclusive") - else: - return datastore_pb2.ReadOptions(transaction=transaction_id) + is_set = [ + bool(x) for x in (eventual, transaction_id, read_time, new_transaction_options) + ] + if sum(is_set) > 1: + raise ValueError( + "At most one of eventual, transaction, or read_time is allowed." 
+ ) + new_options = datastore_pb2.ReadOptions() + if transaction_id is not None: + new_options.transaction = transaction_id + if read_time is not None: + read_time_pb = timestamp_pb2.Timestamp() + read_time_pb.FromDatetime(read_time) + new_options.read_time = read_time_pb + if new_transaction_options is not None: + new_options.new_transaction = new_transaction_options + if eventual: + new_options.read_consistency = ( + datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + ) + return new_options + + +def get_transaction_options(transaction): + """ + Get the transaction_id or new_transaction_options field from an active transaction object, + for use in get_read_options + + These are mutually-exclusive fields, so one or both will be None. + + :rtype: Tuple[Optional[bytes], Optional[google.cloud.datastore_v1.types.TransactionOptions]] + :returns: The transaction_id and new_transaction_options fields from the transaction object. + """ + transaction_id, new_transaction_options = None, None + if transaction is not None: + if transaction.id is not None: + transaction_id = transaction.id + elif transaction._begin_later and transaction._status == transaction._INITIAL: + # If the transaction has not yet been begun, we can use the new_transaction_options field. + new_transaction_options = transaction._options + return transaction_id, new_transaction_options def key_from_protobuf(pb): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 57c0702c2627..72d6fe51b0d3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -778,13 +778,12 @@ def _next_page(self): return None query_pb = self._build_protobuf() - transaction = self.client.current_transaction - if transaction is None: - transaction_id = None - else: - transaction_id = transaction.id + new_transaction_options = None + transaction_id, new_transaction_options = helpers.get_transaction_options( + self.client.current_transaction + ) read_options = helpers.get_read_options( - self._eventual, transaction_id, self._read_time + self._eventual, transaction_id, self._read_time, new_transaction_options ) partition_id = entity_pb2.PartitionId( diff --git a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py index 3e71ae269754..52c17ce29be3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/transaction.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/transaction.py @@ -13,7 +13,6 @@ # limitations under the License. """Create / interact with Google Cloud Datastore transactions.""" - from google.cloud.datastore.batch import Batch from google.cloud.datastore_v1.types import TransactionOptions from google.protobuf import timestamp_pb2 @@ -149,15 +148,23 @@ class Transaction(Batch): :param read_time: (Optional) Time at which the transaction reads entities. Only allowed when ``read_only=True``. This feature is in private preview. + :type begin_later: bool + :param begin_later: (Optional) If True, the transaction will be started + lazily (i.e. when the first RPC is made). If False, + the transaction will be started as soon as the context manager + is entered. `self.begin()` can also be called manually to begin + the transaction at any time. Default is False. + :raises: :class:`ValueError` if read_time is specified when ``read_only=False``. 
""" _status = None - def __init__(self, client, read_only=False, read_time=None): + def __init__(self, client, read_only=False, read_time=None, begin_later=False): super(Transaction, self).__init__(client) self._id = None + self._begin_later = begin_later if read_only: if read_time is not None: @@ -180,8 +187,8 @@ def __init__(self, client, read_only=False, read_time=None): def id(self): """Getter for the transaction ID. - :rtype: str - :returns: The ID of the current transaction. + :rtype: bytes or None + :returns: The ID of the current transaction, or None if not started. """ return self._id @@ -240,6 +247,21 @@ def begin(self, retry=None, timeout=None): self._status = self._ABORTED raise + def _begin_with_id(self, transaction_id): + """ + Attach newly created transaction to an existing transaction ID. + + This is used when begin_later is True, when the first lookup request + associated with this transaction creates a new transaction ID. + + :type transaction_id: bytes + :param transaction_id: ID of the transaction to attach to. + """ + if self._status is not self._INITIAL: + raise ValueError("Transaction already begun.") + self._id = transaction_id + self._status = self._IN_PROGRESS + def rollback(self, retry=None, timeout=None): """Rolls back the current transaction. @@ -258,6 +280,12 @@ def rollback(self, retry=None, timeout=None): Note that if ``retry`` is specified, the timeout applies to each individual attempt. """ + # if transaction has not started, abort it + if self._status == self._INITIAL: + self._status = self._ABORTED + self._id = None + return None + kwargs = _make_retry_timeout_kwargs(retry, timeout) try: @@ -296,6 +324,15 @@ def commit(self, retry=None, timeout=None): Note that if ``retry`` is specified, the timeout applies to each individual attempt. """ + # if transaction has not begun, either begin now, or abort if empty + if self._status == self._INITIAL: + if not self._mutations: + self._status = self._ABORTED + self._id = None + return None + else: + self.begin() + kwargs = _make_retry_timeout_kwargs(retry, timeout) try: @@ -321,3 +358,18 @@ def put(self, entity): raise RuntimeError("Transaction is read only") else: super(Transaction, self).put(entity) + + def __enter__(self): + if not self._begin_later: + self.begin() + self._client._push_batch(self) + return self + + def _allow_mutations(self): + """ + Mutations can be added to a transaction if it is in IN_PROGRESS state, + or if it is in INITIAL state and the begin_later flag is set. 
+ """ + return self._status == self._IN_PROGRESS or ( + self._begin_later and self._status == self._INITIAL + ) diff --git a/packages/google-cloud-datastore/tests/system/test_transaction.py b/packages/google-cloud-datastore/tests/system/test_transaction.py index 6dc9dacd6691..2f7a6897f786 100644 --- a/packages/google-cloud-datastore/tests/system/test_transaction.py +++ b/packages/google-cloud-datastore/tests/system/test_transaction.py @@ -41,6 +41,57 @@ def test_transaction_via_with_statement( assert retrieved_entity == entity +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +@pytest.mark.parametrize("first_call", ["get", "put", "delete"]) +def test_transaction_begin_later( + datastore_client, entities_to_delete, database_id, first_call +): + """ + transactions with begin_later should call begin on first get rpc, or on commit + """ + key = datastore_client.key("Company", "Google") + entity = datastore.Entity(key=key) + entity["url"] = "www.google.com" + + datastore_client.put(entity) + result_entity = datastore_client.get(key) + + with datastore_client.transaction(begin_later=True) as xact: + assert xact._id is None + assert xact._status == xact._INITIAL + if first_call == "get": + datastore_client.get(entity.key) + assert xact._status == xact._IN_PROGRESS + assert xact._id is not None + elif first_call == "put": + xact.put(entity) + assert xact._status == xact._INITIAL + elif first_call == "delete": + xact.delete(result_entity.key) + assert xact._status == xact._INITIAL + assert xact._status == xact._FINISHED + + entities_to_delete.append(result_entity) + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +@pytest.mark.parametrize("raise_exception", [True, False]) +def test_transaction_begin_later_noop(datastore_client, database_id, raise_exception): + """ + empty begin later transactions should terminate quietly + """ + try: + with datastore_client.transaction(begin_later=True) as xact: + assert xact._id is None + assert xact._status == xact._INITIAL + if raise_exception: + raise RuntimeError("test") + except RuntimeError: + pass + assert xact._status == xact._ABORTED + assert xact._id is None + + @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_transaction_via_explicit_begin_get_commit( datastore_client, entities_to_delete, database_id diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py index 15d11acaeb45..8284b8086ccb 100644 --- a/packages/google-cloud-datastore/tests/unit/test_aggregation.py +++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py @@ -471,7 +471,9 @@ def _next_page_helper(txn_id=None, retry=None, timeout=None, database_id=None): if txn_id is None: client = _Client(project, datastore_api=ds_api, database=database_id) else: - transaction = mock.Mock(id=txn_id, spec=["id"]) + transaction = mock.Mock( + id=txn_id, _begin_later=False, spec=["id", "_begin_later"] + ) client = _Client( project, datastore_api=ds_api, transaction=transaction, database=database_id ) @@ -612,6 +614,57 @@ def test_transaction_id_populated(database_id, aggregation_type, aggregation_arg assert read_options.transaction == client.current_transaction.id +@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +@pytest.mark.parametrize( + "aggregation_type,aggregation_args", + [ + ("count", ()), + ( + "sum", + ("appearances",), + ), + ("avg", ("appearances",)), + 
], +) +def test_transaction_begin_later(database_id, aggregation_type, aggregation_args): + """ + When an aggregation is run in the context of a transaction with begin_later=True, + the new_transaction field should be populated in the request read_options. + """ + import mock + from google.cloud.datastore_v1.types import TransactionOptions + + # make a fake begin_later transaction + transaction = mock.Mock() + transaction.id = None + transaction._begin_later = True + transaction._status = transaction._INITIAL + transaction._options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) + mock_datastore_api = mock.Mock() + mock_gapic = mock_datastore_api.run_aggregation_query + mock_gapic.return_value = _make_aggregation_query_response([]) + client = _Client( + None, + datastore_api=mock_datastore_api, + database=database_id, + transaction=transaction, + ) + + query = _make_query(client) + aggregation_query = _make_aggregation_query(client=client, query=query) + + # initiate requested aggregation (ex count, sum, avg) + getattr(aggregation_query, aggregation_type)(*aggregation_args) + # run mock query + list(aggregation_query.fetch()) + assert mock_gapic.call_count == 1 + request = mock_gapic.call_args[1]["request"] + read_options = request["read_options"] + # ensure new_transaction is populated + assert not read_options.transaction + assert read_options.new_transaction == transaction._options + + class _Client(object): def __init__( self, diff --git a/packages/google-cloud-datastore/tests/unit/test_client.py b/packages/google-cloud-datastore/tests/unit/test_client.py index 412f3923e145..2b5c01f42f1e 100644 --- a/packages/google-cloud-datastore/tests/unit/test_client.py +++ b/packages/google-cloud-datastore/tests/unit/test_client.py @@ -705,6 +705,52 @@ def test_client_get_multi_hit_w_transaction(database_id): ds_api.lookup.assert_called_once_with(request=expected_request) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_client_get_multi_hit_w_transaction_begin_later(database_id): + """ + Transactions with begin_later set should begin on first read + """ + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + from google.cloud.datastore.key import Key + + kind = "Kind" + id_ = 1234 + expected_server_id = b"123" + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo", database=database_id) + + # Make a connection to return the entity pb. 
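+ # The stubbed lookup response also carries a server-assigned transaction id,
+ # which the begin-later transaction under test is expected to adopt.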
+ creds = _make_credentials() + client = _make_client(credentials=creds, database=database_id) + lookup_response = _make_lookup_response( + results=[entity_pb], transaction=expected_server_id + ) + ds_api = _make_datastore_api(lookup_response=lookup_response) + client._datastore_api_internal = ds_api + + key = Key(kind, id_, project=PROJECT, database=database_id) + txn = client.transaction(begin_later=True) + assert txn._id is None + assert txn._status == txn._INITIAL + client.get_multi([key], transaction=txn) + + # transaction should now be started + assert txn._id == expected_server_id + assert txn._id is not None + assert txn._status == txn._IN_PROGRESS + + # check rpc args + expected_read_options = datastore_pb2.ReadOptions(new_transaction=txn._options) + expected_request = { + "project_id": PROJECT, + "keys": [key.to_protobuf()], + "read_options": expected_read_options, + } + set_database_id_to_request(expected_request, database_id) + ds_api.lookup.assert_called_once_with(request=expected_request) + + @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_client_get_multi_hit_w_read_time(database_id): from datetime import datetime @@ -1847,7 +1893,7 @@ def _make_commit_response(*keys): return datastore_pb2.CommitResponse(mutation_results=mutation_results) -def _make_lookup_response(results=(), missing=(), deferred=()): +def _make_lookup_response(results=(), missing=(), deferred=(), transaction=None): entity_results_found = [ mock.Mock(entity=result, spec=["entity"]) for result in results ] @@ -1858,7 +1904,8 @@ def _make_lookup_response(results=(), missing=(), deferred=()): found=entity_results_found, missing=entity_results_missing, deferred=deferred, - spec=["found", "missing", "deferred"], + transaction=transaction, + spec=["found", "missing", "deferred", "transaction"], ) diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 89bf61657ed2..38702dbad21b 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -586,6 +586,91 @@ def test__get_read_options_w_default_wo_txn_w_read_time(): assert read_options == expected +def test__get_read_options_w_new_transaction(): + from google.cloud.datastore.helpers import get_read_options + from google.cloud.datastore_v1.types import datastore as datastore_pb2 + + input_options = datastore_pb2.TransactionOptions() + read_options = get_read_options(False, None, new_transaction_options=input_options) + expected = datastore_pb2.ReadOptions(new_transaction=input_options) + assert read_options == expected + + +@pytest.mark.parametrize( + "args", + [ + (True, "id"), + (True, "id", None), + (True, None, "read_time"), + (True, None, None, "new"), + (False, "id", "read_time"), + (False, "id", None, "new"), + (False, None, "read_time", "new"), + ], +) +def test__get_read_options_w_multiple_args(args): + """ + arguments are mutually exclusive. 
Should raise ValueError if multiple are set + """ + from google.cloud.datastore.helpers import get_read_options + + with pytest.raises(ValueError): + get_read_options(*args) + + +def test__get_transaction_options_none(): + """ + test with empty transaction input + """ + from google.cloud.datastore.helpers import get_transaction_options + + t_id, new_t = get_transaction_options(None) + assert t_id is None + assert new_t is None + + +def test__get_transaction_options_w_id(): + """ + test with transaction with id set + """ + from google.cloud.datastore.helpers import get_transaction_options + from google.cloud.datastore import Transaction + + expected_id = b"123abc" + txn = Transaction(None, begin_later=True) + txn._id = expected_id + t_id, new_t = get_transaction_options(txn) + assert t_id == expected_id + assert new_t is None + + +def test__get_transaction_options_w_begin_later(): + """ + if begin_later is set and it hasn't begun, should return new_transaction_options + """ + from google.cloud.datastore.helpers import get_transaction_options + from google.cloud.datastore import Transaction + + txn = Transaction(None, begin_later=True) + t_id, new_t = get_transaction_options(txn) + assert t_id is None + assert new_t is txn._options + + +def test__get_transaction_options_not_started(): + """ + If the transaction is not set as begin_later, but it hasn't begun, return None for both + """ + from google.cloud.datastore.helpers import get_transaction_options + from google.cloud.datastore import Transaction + + txn = Transaction(None, begin_later=False) + t_id, new_t = get_transaction_options(txn) + assert t_id is None + assert new_t is None + + def test__pb_attr_value_w_datetime_naive(): import calendar import datetime diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 84c0bedf5010..6c2063bbe244 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -667,7 +667,7 @@ def test_eventual_transaction_fails(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_transaction_id_populated(database_id): """ - When an aggregation is run in the context of a transaction, the transaction + When a query is run in the context of a transaction, the transaction ID should be populated in the request. """ import mock @@ -698,6 +698,47 @@ def test_transaction_id_populated(database_id): assert read_options.transaction == client.current_transaction.id + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_query_transaction_begin_later(database_id): + """ + When a query is run in the context of a transaction with begin_later=True, + the new_transaction field should be populated in the request read_options.
+ """ + import mock + from google.cloud.datastore_v1.types import TransactionOptions + + # make a fake begin_later transaction + transaction = mock.Mock() + transaction.id = None + transaction._begin_later = True + transaction._status = transaction._INITIAL + transaction._options = TransactionOptions(read_only=TransactionOptions.ReadOnly()) + + mock_datastore_api = mock.Mock() + mock_gapic = mock_datastore_api.run_query + + more_results_enum = 3 # NO_MORE_RESULTS + response_pb = _make_query_response([], b"", more_results_enum, 0) + mock_gapic.return_value = response_pb + + client = _Client( + None, + datastore_api=mock_datastore_api, + database=database_id, + transaction=transaction, + ) + + query = _make_query(client) + # run mock query + list(query.fetch()) + assert mock_gapic.call_count == 1 + request = mock_gapic.call_args[1]["request"] + read_options = request["read_options"] + # ensure new_transaction is populated + assert not read_options.transaction + assert read_options.new_transaction == transaction._options + + def test_iterator_constructor_defaults(): query = object() client = object() @@ -885,7 +926,9 @@ def _next_page_helper( if txn_id is None: client = _Client(project, database=database, datastore_api=ds_api) else: - transaction = mock.Mock(id=txn_id, spec=["id"]) + transaction = mock.Mock( + id=txn_id, _begin_later=False, spec=["id", "_begin_later"] + ) client = _Client( project, database=database, datastore_api=ds_api, transaction=transaction ) diff --git a/packages/google-cloud-datastore/tests/unit/test_transaction.py b/packages/google-cloud-datastore/tests/unit/test_transaction.py index 23574ef4cabd..cee384bba76e 100644 --- a/packages/google-cloud-datastore/tests/unit/test_transaction.py +++ b/packages/google-cloud-datastore/tests/unit/test_transaction.py @@ -81,6 +81,27 @@ def test_transaction_constructor_read_write_w_read_time(database_id): _make_transaction(client, read_only=False, read_time=read_time) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_constructor_begin_later(database_id): + from google.cloud.datastore.transaction import Transaction + + project = "PROJECT" + client = _Client(project, database=database_id) + expected_id = b"1234" + + xact = _make_transaction(client, begin_later=True) + assert xact._status == Transaction._INITIAL + assert xact.id is None + + xact._begin_with_id(expected_id) + assert xact._status == Transaction._IN_PROGRESS + assert xact.id == expected_id + + # calling a second time should raise exeception + with pytest.raises(ValueError): + xact._begin_with_id(expected_id) + + @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_transaction_current(database_id): from google.cloud.datastore_v1.types import datastore as datastore_pb2 @@ -375,6 +396,7 @@ def test_transaction_context_manager_no_raise(database_id): xact = _make_transaction(client) with xact: + assert xact._status == xact._IN_PROGRESS # only set between begin / commit assert xact.id == id_ @@ -427,6 +449,34 @@ class Foo(Exception): client._datastore_api.rollback.assert_called_once_with(request=expected_request) +@pytest.mark.parametrize("with_exception", [False, True]) +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_context_manager_w_begin_later(database_id, with_exception): + """ + If begin_later is set, don't begin transaction when entering context manager + """ + project = "PROJECT" + id_ = 912830 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, 
database=database_id) + xact = _make_transaction(client, begin_later=True) + + try: + with xact: + assert xact._status == xact._INITIAL + assert xact.id is None + if with_exception: + raise RuntimeError("expected") + except RuntimeError: + pass + # should be finalized after context manager block + assert xact._status == xact._ABORTED + assert xact.id is None + # no need to call commit or rollback + assert ds_api.commit.call_count == 0 + assert ds_api.rollback.call_count == 0 + + @pytest.mark.parametrize("database_id", [None, "somedb"]) def test_transaction_put_read_only(database_id): project = "PROJECT" @@ -441,6 +491,100 @@ def test_transaction_put_read_only(database_id): xact.put(entity) + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_put_w_begin_later(database_id): + """ + If begin_later is set, should be able to call put without begin first + """ + project = "PROJECT" + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, database=database_id) + entity = _Entity(database=database_id) + with _make_transaction(client, begin_later=True) as xact: + assert xact._status == xact._INITIAL + assert len(xact.mutations) == 0 + xact.put(entity) + assert len(xact.mutations) == 1 + # should still be in initial state + assert xact._status == xact._INITIAL + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_delete_w_begin_later(database_id): + """ + If begin_later is set, should be able to call delete without begin first + """ + project = "PROJECT" + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, database=database_id) + entity = _Entity(database=database_id) + with _make_transaction(client, begin_later=True) as xact: + assert xact._status == xact._INITIAL + assert len(xact.mutations) == 0 + xact.delete(entity.key.completed_key("name")) + assert len(xact.mutations) == 1 + # should still be in initial state + assert xact._status == xact._INITIAL + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_rollback_no_begin(database_id): + """ + If rollback is called without begin, the transaction should abort + """ + project = "PROJECT" + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, database=database_id) + with _make_transaction(client, begin_later=True) as xact: + assert xact._status == xact._INITIAL + with mock.patch.object(xact, "begin") as begin: + xact.rollback() + begin.assert_not_called() + assert xact._status == xact._ABORTED + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_transaction_commit_no_begin(database_id): + """ + If commit is called without begin, and it has mutations staged, + should call begin before commit + """ + project = "PROJECT" + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, database=database_id) + entity = _Entity(database=database_id) + with _make_transaction(client, begin_later=True) as xact: + assert xact._status == xact._INITIAL + xact.put(entity) + assert xact._status == xact._INITIAL + with mock.patch.object(xact, "begin") as begin: + begin.side_effect = lambda: setattr(xact, "_status", xact._IN_PROGRESS) + xact.commit() + begin.assert_called_once_with() + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_empty_transaction_commit(database_id): + """ + If commit is called without begin, and it has no mutations
staged, + should abort + """ + project = "PROJECT" + id_ = 943243 + ds_api = _make_datastore_api(xact_id=id_) + client = _Client(project, datastore_api=ds_api, database=database_id) + with _make_transaction(client, begin_later=True) as xact: + assert xact._status == xact._INITIAL + with mock.patch.object(xact, "begin") as begin: + xact.commit() + begin.assert_not_called() + assert xact._status == xact._ABORTED + + def _make_key(kind, id_, project, database=None): from google.cloud.datastore_v1.types import entity as entity_pb2 From 4c79e820833540c6d6c9b87804f81bb6d63adf60 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 17:39:27 -0400 Subject: [PATCH 574/611] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#537) * chore(python): bump idna from 3.4 to 3.7 in .kokoro Source-Link: https://github.com/googleapis/synthtool/commit/d50980e704793a2d3310bfb3664f3a82f24b5796 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 * Apply changes from googleapis/synthtool#1950 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.github/auto-label.yaml | 5 ++++ .../.github/blunderbuss.yml | 23 +++++++++++++++++++ .../.kokoro/requirements.txt | 6 ++--- packages/google-cloud-datastore/README.rst | 2 +- .../google-cloud-datastore/docs/index.rst | 5 ++++ .../docs/summary_overview.md | 22 ++++++++++++++++++ 7 files changed, 61 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-datastore/.github/blunderbuss.yml create mode 100644 packages/google-cloud-datastore/docs/summary_overview.md diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 4bdeef3904e2..81f87c56917d 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 -# created: 2024-03-15T16:25:47.905264637Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/packages/google-cloud-datastore/.github/auto-label.yaml b/packages/google-cloud-datastore/.github/auto-label.yaml index b2016d119b40..8b37ee89711f 100644 --- a/packages/google-cloud-datastore/.github/auto-label.yaml +++ b/packages/google-cloud-datastore/.github/auto-label.yaml @@ -13,3 +13,8 @@ # limitations under the License. requestsize: enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/packages/google-cloud-datastore/.github/blunderbuss.yml b/packages/google-cloud-datastore/.github/blunderbuss.yml new file mode 100644 index 000000000000..54156a1b55b2 --- /dev/null +++ b/packages/google-cloud-datastore/.github/blunderbuss.yml @@ -0,0 +1,23 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. 
+assign_issues: + - googleapis/cloud-native-db-dpes + - googleapis/api-datastore-sdk + - googleapis/api-firestore-partners + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/cloud-native-db-dpes + - googleapis/api-datastore-sdk + - googleapis/api-firestore-partners + +assign_prs: + - googleapis/cloud-native-db-dpes + - googleapis/api-datastore-sdk + - googleapis/api-firestore-partners diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index dd61f5f32018..51f92b8e12f1 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index 0d5b7f269581..c5f944dc9b40 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -19,7 +19,7 @@ consistency for all other queries. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg :target: https://pypi.org/project/google-cloud-datastore/ .. _Google Cloud Datastore API: https://cloud.google.com/datastore -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest/summary_overview .. _Product Documentation: https://cloud.google.com/datastore Quick Start diff --git a/packages/google-cloud-datastore/docs/index.rst b/packages/google-cloud-datastore/docs/index.rst index 890ec56aee80..abf8561bc7a0 100644 --- a/packages/google-cloud-datastore/docs/index.rst +++ b/packages/google-cloud-datastore/docs/index.rst @@ -43,3 +43,8 @@ For a list of all ``google-cloud-datastore`` releases: :maxdepth: 2 changelog + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-datastore/docs/summary_overview.md b/packages/google-cloud-datastore/docs/summary_overview.md new file mode 100644 index 000000000000..2473abe737bc --- /dev/null +++ b/packages/google-cloud-datastore/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Google Cloud Datastore API + +Overview of the APIs available for Google Cloud Datastore API. + +## All entries + +Classes, methods and properties & attributes for +Google Cloud Datastore API. 
+ +[classes](https://cloud.google.com/python/docs/reference/datastore/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/datastore/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/datastore/latest/summary_property.html) From 6633b5c014f8cd791fabf2797704f29d60f13bd3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 09:36:02 -0700 Subject: [PATCH 575/611] feat(spanner): Add support for Cloud Spanner Scheduled Backups (#540) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 feat: New PropertyMask field which allows partial commits, lookups, and query results PiperOrigin-RevId: 635449160 Source-Link: https://github.com/googleapis/googleapis/commit/dde0ec1f36cb8cbf9036dd0f1e8e5eda7882db4e Source-Link: https://github.com/googleapis/googleapis-gen/commit/8caa60d9aea82964a19cdf8faf91384911db8bdd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGNhYTYwZDlhZWE4Mjk2NGExOWNkZjhmYWY5MTM4NDkxMWRiOGJkZCJ9 chore: Update gapic-generator-python to v1.17.1 PiperOrigin-RevId: 629071173 Source-Link: https://github.com/googleapis/googleapis/commit/4afa392105cc62e965631d15b772ff68454ecf1c Source-Link: https://github.com/googleapis/googleapis-gen/commit/16dbbb4d0457db5e61ac9f99b0d52a46154455ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 120 ++- .../services/datastore_admin/client.py | 68 +- .../datastore_admin/transports/base.py | 4 +- .../datastore_admin/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 84 +- .../google/cloud/datastore_v1/__init__.py | 2 + .../services/datastore/async_client.py | 176 ++-- .../datastore_v1/services/datastore/client.py | 96 +- .../services/datastore/transports/base.py | 4 +- .../services/datastore/transports/grpc.py | 28 +- .../datastore/transports/grpc_asyncio.py | 114 ++- .../cloud/datastore_v1/types/__init__.py | 2 + .../cloud/datastore_v1/types/datastore.py | 66 ++ .../scripts/fixup_datastore_v1_keywords.py | 4 +- .../test_datastore_admin.py | 760 +++++++++++++- .../unit/gapic/datastore_v1/test_datastore.py | 950 ++++++++++++++++++ 16 files changed, 2203 insertions(+), 303 deletions(-) diff --git 
a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index b564e8c1a38f..4b7b0c8d854b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -250,7 +252,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, DatastoreAdminTransport, Callable[..., DatastoreAdminTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -262,9 +266,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DatastoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreAdminTransport,Callable[..., DatastoreAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreAdminTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -429,8 +435,8 @@ async def sample_export_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] ) @@ -440,7 +446,10 @@ async def sample_export_entities(): "the individual field arguments should be set." ) - request = datastore_admin.ExportEntitiesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.ExportEntitiesRequest): + request = datastore_admin.ExportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -456,11 +465,9 @@ async def sample_export_entities(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_entities + ] # Certain fields should be provided within the metadata header; # add these here. 
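
The hunks above replace per-call `gapic_v1.method_async.wrap_method(...)` with a lookup into the transport's precomputed `_wrapped_methods` table, so retry and timeout policy is computed once per transport instead of on every RPC (the table itself is built by the new `_prep_wrapped_messages` override added to `grpc_asyncio.py` later in this patch). A minimal sketch of the pattern, using simplified stand-in names that are not part of the generated code:

    from google.api_core import gapic_v1

    class SketchAsyncTransport:
        """Stand-in transport; only the wrapping pattern is illustrated."""

        async def export_entities(self, request):
            ...  # the raw gRPC stub call would live here

        def _prep_wrapped_messages(self, client_info):
            # Built once at transport construction; clients index this dict
            # instead of re-wrapping the method on every invocation.
            self._wrapped_methods = {
                self.export_entities: gapic_v1.method_async.wrap_method(
                    self.export_entities,
                    default_timeout=60.0,
                    client_info=client_info,
                ),
            }

    # A call site then reduces to a dictionary lookup:
    #   rpc = self._client._transport._wrapped_methods[
    #       self._client._transport.export_entities
    #   ]
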
@@ -613,8 +620,8 @@ async def sample_import_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: raise ValueError( @@ -622,7 +629,10 @@ async def sample_import_entities(): "the individual field arguments should be set." ) - request = datastore_admin.ImportEntitiesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.ImportEntitiesRequest): + request = datastore_admin.ImportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -638,11 +648,9 @@ async def sample_import_entities(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_entities + ] # Certain fields should be provided within the metadata header; # add these here. @@ -747,15 +755,16 @@ async def sample_create_index(): """ # Create or coerce a protobuf request object. - request = datastore_admin.CreateIndexRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.CreateIndexRequest): + request = datastore_admin.CreateIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_index + ] # Certain fields should be provided within the metadata header; # add these here. @@ -859,15 +868,16 @@ async def sample_delete_index(): """ # Create or coerce a protobuf request object. - request = datastore_admin.DeleteIndexRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.DeleteIndexRequest): + request = datastore_admin.DeleteIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_index + ] # Certain fields should be provided within the metadata header; # add these here. @@ -952,25 +962,16 @@ async def sample_get_index(): Datastore composite index definition. """ # Create or coerce a protobuf request object. - request = datastore_admin.GetIndexRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, datastore_admin.GetIndexRequest): + request = datastore_admin.GetIndexRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_index, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_index + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1056,25 +1057,16 @@ async def sample_list_indexes(): """ # Create or coerce a protobuf request object. - request = datastore_admin.ListIndexesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore_admin.ListIndexesRequest): + request = datastore_admin.ListIndexesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_indexes, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_indexes + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index de174f58dbd3..e6f35ba3f0ce 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -565,7 +566,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatastoreAdminTransport]] = None, + transport: Optional[ + Union[str, DatastoreAdminTransport, Callable[..., DatastoreAdminTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -577,9 +580,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DatastoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreAdminTransport,Callable[..., DatastoreAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreAdminTransport constructor. + If set to None, a transport is chosen automatically. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -688,8 +693,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DatastoreAdminTransport], Callable[..., DatastoreAdminTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DatastoreAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -822,8 +834,8 @@ def sample_export_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] ) @@ -833,10 +845,8 @@ def sample_export_entities(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ExportEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.ExportEntitiesRequest): request = datastore_admin.ExportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1005,8 +1015,8 @@ def sample_import_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: raise ValueError( @@ -1014,10 +1024,8 @@ def sample_import_entities(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ImportEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.ImportEntitiesRequest): request = datastore_admin.ImportEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1138,10 +1146,8 @@ def sample_create_index(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.CreateIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, datastore_admin.CreateIndexRequest): request = datastore_admin.CreateIndexRequest(request) @@ -1251,10 +1257,8 @@ def sample_delete_index(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.DeleteIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.DeleteIndexRequest): request = datastore_admin.DeleteIndexRequest(request) @@ -1345,10 +1349,8 @@ def sample_get_index(): Datastore composite index definition. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.GetIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.GetIndexRequest): request = datastore_admin.GetIndexRequest(request) @@ -1440,10 +1442,8 @@ def sample_list_indexes(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ListIndexesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore_admin.ListIndexesRequest): request = datastore_admin.ListIndexesRequest(request) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index bddab4904ec7..8c3a00f364fe 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -88,6 +88,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
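
The `_ignore_credentials` flag added to `transports/base.py` below is what lets a caller hand the gRPC transports a ready-made channel without triggering an Application Default Credentials lookup: the grpc and grpc_asyncio subclasses set the flag before delegating to the base `__init__` whenever they detect a channel instance (as opposed to a channel factory). A hedged sketch of the resulting resolution order, simplified from the patched code (the real base transport raises `DuplicateCredentialArgs` rather than `ValueError`):

    import google.auth

    def resolve_credentials(credentials, credentials_file, ignore_credentials,
                            scopes_kwargs, quota_project_id):
        # Precedence: explicit credentials > credentials_file > ADC,
        # with the ADC step skipped when a live channel was supplied.
        if credentials and credentials_file:
            raise ValueError("'credentials_file' and 'credentials' are mutually exclusive")
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None and not ignore_credentials:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
        return credentials
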
@@ -100,7 +102,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 68867594195f..4d08d9c43999 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -106,7 +106,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -126,14 +126,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -143,11 +146,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -174,9 +177,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -215,7 +219,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 367a5ab69922..7526fc5cda8f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -121,7 +123,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -151,7 +152,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -171,15 +172,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -189,11 +193,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -220,9 +224,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -260,7 +265,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -517,6 +524,61 @@ def list_indexes( ) return self._stubs["list_indexes"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.export_entities: gapic_v1.method_async.wrap_method( + self.export_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.import_entities: gapic_v1.method_async.wrap_method( + self.import_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.create_index: gapic_v1.method_async.wrap_method( + self.create_index, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_index: gapic_v1.method_async.wrap_method( + self.delete_index, + default_timeout=60.0, + client_info=client_info, + ), + self.get_index: gapic_v1.method_async.wrap_method( + self.get_index, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_indexes: gapic_v1.method_async.wrap_method( + self.list_indexes, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index 8c9d09fe7872..b1855affcf3f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -33,6 +33,7 @@ from .types.datastore import LookupResponse from .types.datastore import Mutation from .types.datastore import MutationResult +from .types.datastore import PropertyMask from .types.datastore import ReadOptions from .types.datastore import ReserveIdsRequest from .types.datastore import ReserveIdsResponse @@ -98,6 +99,7 @@ "PlanSummary", "Projection", "PropertyFilter", + "PropertyMask", "PropertyOrder", "PropertyReference", "Query", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index e911a362b414..d6c347f6ee9d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -197,7 +199,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, DatastoreTransport, Callable[..., DatastoreTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -209,9 +213,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DatastoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreTransport,Callable[..., DatastoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -330,8 +336,8 @@ async def sample_lookup(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -339,7 +345,10 @@ async def sample_lookup(): "the individual field arguments should be set." ) - request = datastore.LookupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.LookupRequest): + request = datastore.LookupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -352,21 +361,7 @@ async def sample_lookup(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lookup, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.lookup] # Certain fields should be provided within the metadata header; # add these here. @@ -443,25 +438,16 @@ async def sample_run_query(): """ # Create or coerce a protobuf request object. - request = datastore.RunQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.RunQueryRequest): + request = datastore.RunQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.run_query + ] # Certain fields should be provided within the metadata header; # add these here. @@ -538,25 +524,16 @@ async def sample_run_aggregation_query(): """ # Create or coerce a protobuf request object. - request = datastore.RunAggregationQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.RunAggregationQueryRequest): + request = datastore.RunAggregationQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_aggregation_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.run_aggregation_query + ] # Certain fields should be provided within the metadata header; # add these here. @@ -641,8 +618,8 @@ async def sample_begin_transaction(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: raise ValueError( @@ -650,7 +627,10 @@ async def sample_begin_transaction(): "the individual field arguments should be set." ) - request = datastore.BeginTransactionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.BeginTransactionRequest): + request = datastore.BeginTransactionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -659,11 +639,9 @@ async def sample_begin_transaction(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.begin_transaction, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.begin_transaction + ] # Certain fields should be provided within the metadata header; # add these here. @@ -788,8 +766,8 @@ async def sample_commit(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: raise ValueError( @@ -797,7 +775,10 @@ async def sample_commit(): "the individual field arguments should be set." ) - request = datastore.CommitRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.CommitRequest): + request = datastore.CommitRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -812,11 +793,7 @@ async def sample_commit(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.commit] # Certain fields should be provided within the metadata header; # add these here. @@ -912,8 +889,8 @@ async def sample_rollback(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: raise ValueError( @@ -921,7 +898,10 @@ async def sample_rollback(): "the individual field arguments should be set." ) - request = datastore.RollbackRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, datastore.RollbackRequest): + request = datastore.RollbackRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -932,11 +912,7 @@ async def sample_rollback(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] # Certain fields should be provided within the metadata header; # add these here. @@ -1032,8 +1008,8 @@ async def sample_allocate_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1041,7 +1017,10 @@ async def sample_allocate_ids(): "the individual field arguments should be set." ) - request = datastore.AllocateIdsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.AllocateIdsRequest): + request = datastore.AllocateIdsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1052,11 +1031,9 @@ async def sample_allocate_ids(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.allocate_ids, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.allocate_ids + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1151,8 +1128,8 @@ async def sample_reserve_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1160,7 +1137,10 @@ async def sample_reserve_ids(): "the individual field arguments should be set." ) - request = datastore.ReserveIdsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datastore.ReserveIdsRequest): + request = datastore.ReserveIdsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1171,21 +1151,9 @@ async def sample_reserve_ids(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reserve_ids, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.reserve_ids + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 0a4981751ffc..6c3cb8029228 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -516,7 +517,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatastoreTransport]] = None, + transport: Optional[ + Union[str, DatastoreTransport, Callable[..., DatastoreTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -528,9 +531,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DatastoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DatastoreTransport,Callable[..., DatastoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatastoreTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -636,8 +641,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DatastoreTransport], Callable[..., DatastoreTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DatastoreTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -724,8 +736,8 @@ def sample_lookup(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -733,10 +745,8 @@ def sample_lookup(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.LookupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.LookupRequest): request = datastore.LookupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -832,10 +842,8 @@ def sample_run_query(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RunQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.RunQueryRequest): request = datastore.RunQueryRequest(request) @@ -923,10 +931,8 @@ def sample_run_aggregation_query(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RunAggregationQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.RunAggregationQueryRequest): request = datastore.RunAggregationQueryRequest(request) @@ -1022,8 +1028,8 @@ def sample_begin_transaction(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1031,10 +1037,8 @@ def sample_begin_transaction(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.BeginTransactionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.BeginTransactionRequest): request = datastore.BeginTransactionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1174,8 +1178,8 @@ def sample_commit(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: raise ValueError( @@ -1183,10 +1187,8 @@ def sample_commit(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.CommitRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.CommitRequest): request = datastore.CommitRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1303,8 +1305,8 @@ def sample_rollback(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: raise ValueError( @@ -1312,10 +1314,8 @@ def sample_rollback(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RollbackRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.RollbackRequest): request = datastore.RollbackRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1428,8 +1428,8 @@ def sample_allocate_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1437,10 +1437,8 @@ def sample_allocate_ids(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.AllocateIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datastore.AllocateIdsRequest): request = datastore.AllocateIdsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1552,8 +1550,8 @@ def sample_reserve_ids(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: raise ValueError( @@ -1561,10 +1559,8 @@ def sample_reserve_ids(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datastore.ReserveIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
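The reworded comments across these methods all document one hand-written convention: a caller may pass a ready request message, a dict, or flattened keyword arguments, but not a request plus flattened fields. A hedged sketch of that convention with illustrative stand-ins (not the generated proto-plus types):

class LookupRequest:
    # Stand-in for the proto-plus message; real messages likewise accept a
    # dict or another message in their constructor.
    def __init__(self, mapping=None, *, project_id=None, keys=None):
        if isinstance(mapping, dict):
            project_id = mapping.get("project_id", project_id)
            keys = mapping.get("keys", keys)
        self.project_id = project_id
        self.keys = list(keys or [])

def lookup(request=None, *, project_id=None, keys=None):
    # Quick check: a request object and flattened fields are mutually exclusive.
    if request is not None and any([project_id, keys]):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    # Use the request object if provided (no copy is needed because flattened
    # fields are absent), or create one from the dict / None.
    if not isinstance(request, LookupRequest):
        request = LookupRequest(request)
    if project_id is not None:
        request.project_id = project_id
    if keys:
        request.keys.extend(keys)
    return request

print(lookup(request={"project_id": "demo"}).project_id)  # demo
print(lookup(project_id="demo", keys=["k1"]).keys)        # ['k1']
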
if not isinstance(request, datastore.ReserveIdsRequest): request = datastore.ReserveIdsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 3c31a4a7dbf8..db08f5b4da36 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -86,6 +86,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -98,7 +100,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index ebc16b2189ae..620576e29f89 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -57,7 +57,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -77,14 +77,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -94,11 +97,11 @@ def __init__( private key bytes, both in PEM format. 
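The base-transport hunk above introduces `_ignore_credentials` so a concrete transport that already holds a channel can opt out of ambient-credential discovery, replacing the old trick of smuggling `credentials = False` past the base class. A simplified sketch of that handshake (stand-in classes, not the real transports):

def default_credentials():
    # Stand-in for google.auth.default(); the real call probes the environment.
    return "ambient-credentials"

class BaseTransport:
    def __init__(self, credentials=None):
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials = False
        if credentials is None and not self._ignore_credentials:
            credentials = default_credentials()
        self._credentials = credentials

class GrpcTransport(BaseTransport):
    def __init__(self, channel=None, credentials=None):
        if channel is not None:  # a ready-made channel carries its own auth
            credentials = None
            self._ignore_credentials = True
        super().__init__(credentials=credentials)

print(GrpcTransport()._credentials)                  # ambient-credentials
print(GrpcTransport(channel=object())._credentials)  # None

Setting the flag before calling `super().__init__` matters: the base constructor only consults it via `hasattr`, defaulting to `False` when no subclass has set it.
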
It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -124,9 +127,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -165,7 +169,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 7b3997ddd336..b826d7c6bcf5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -72,7 +74,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -102,7 +103,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -122,15 +123,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. 
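`channel` is now instance-or-factory: a real `grpc.Channel` is used directly (with credentials ignored), while a callable is invoked with the same arguments `create_channel` would receive, and `channel_init = channel or type(self).create_channel` covers the None case. A toy version of that dispatch (`FakeChannel` here is an illustrative stand-in for `grpc.Channel`):

class FakeChannel:
    # Illustrative stand-in for grpc.Channel.
    def __init__(self, target):
        self.target = target

class SketchTransport:
    @classmethod
    def create_channel(cls, host, credentials=None):
        return FakeChannel(host)

    def __init__(self, host, channel=None, credentials=None):
        self._grpc_channel = None
        if isinstance(channel, FakeChannel):
            self._grpc_channel = channel  # ready-made instance: use as-is
        if not self._grpc_channel:
            # Fall back to the provided factory, else the class-level default.
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(host, credentials=credentials)

def logging_factory(host, credentials=None):
    return FakeChannel("wrapped:" + host)

print(SketchTransport("datastore.example")._grpc_channel.target)
print(SketchTransport("datastore.example", channel=logging_factory)._grpc_channel.target)

The `isinstance` test is the key change from the old `if channel:` check: a callable is truthy too, so only a genuine channel instance short-circuits channel creation and credential handling.
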
+ This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -140,11 +144,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -170,9 +174,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -210,7 +215,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -460,6 +467,91 @@ def reserve_ids( ) return self._stubs["reserve_ids"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.lookup: gapic_v1.method_async.wrap_method( + self.lookup, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_query: gapic_v1.method_async.wrap_method( + self.run_query, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_aggregation_query: gapic_v1.method_async.wrap_method( + self.run_aggregation_query, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method_async.wrap_method( + self.begin_transaction, + default_timeout=60.0, + client_info=client_info, + ), + self.commit: gapic_v1.method_async.wrap_method( + self.commit, + default_timeout=60.0, + client_info=client_info, + ), + self.rollback: gapic_v1.method_async.wrap_method( + self.rollback, + default_timeout=60.0, + client_info=client_info, + ), + self.allocate_ids: gapic_v1.method_async.wrap_method( + self.allocate_ids, + default_timeout=60.0, + client_info=client_info, + ), + self.reserve_ids: gapic_v1.method_async.wrap_method( + self.reserve_ids, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index 6aa3d84692ce..3ae809b44e96 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -28,6 +28,7 @@ LookupResponse, Mutation, MutationResult, + PropertyMask, ReadOptions, ReserveIdsRequest, ReserveIdsResponse, @@ -81,6 +82,7 @@ "LookupResponse", "Mutation", "MutationResult", + "PropertyMask", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index ccea0458cbfd..11974c3d968c 100644 --- 
a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -47,6 +47,7 @@ "ReserveIdsResponse", "Mutation", "MutationResult", + "PropertyMask", "ReadOptions", "TransactionOptions", }, @@ -70,6 +71,15 @@ class LookupRequest(proto.Message): The options for this lookup request. keys (MutableSequence[google.cloud.datastore_v1.types.Key]): Required. Keys of entities to look up. + property_mask (google.cloud.datastore_v1.types.PropertyMask): + The properties to return. Defaults to returning all + properties. + + If this field is set and an entity has a property not + referenced in the mask, it will be absent from + [LookupResponse.found.entity.properties][]. + + The entity's key is always returned. """ project_id: str = proto.Field( @@ -90,6 +100,11 @@ class LookupRequest(proto.Message): number=3, message=entity.Key, ) + property_mask: "PropertyMask" = proto.Field( + proto.MESSAGE, + number=5, + message="PropertyMask", + ) class LookupResponse(proto.Message): @@ -186,6 +201,12 @@ class RunQueryRequest(proto.Message): non-aggregation query. This field is a member of `oneof`_ ``query_type``. + property_mask (google.cloud.datastore_v1.types.PropertyMask): + The properties to return. This field must not be set for a + projection query. + + See + [LookupRequest.property_mask][google.datastore.v1.LookupRequest.property_mask]. explain_options (google.cloud.datastore_v1.types.ExplainOptions): Optional. Explain options for the query. If set, additional query statistics will be @@ -223,6 +244,11 @@ class RunQueryRequest(proto.Message): oneof="query_type", message=gd_query.GqlQuery, ) + property_mask: "PropertyMask" = proto.Field( + proto.MESSAGE, + number=10, + message="PropertyMask", + ) explain_options: query_profile.ExplainOptions = proto.Field( proto.MESSAGE, number=12, @@ -770,6 +796,14 @@ class Mutation(proto.Message): mutation conflicts. This field is a member of `oneof`_ ``conflict_detection_strategy``. + property_mask (google.cloud.datastore_v1.types.PropertyMask): + The properties to write in this mutation. None of the + properties in the mask may have a reserved name, except for + ``__key__``. This field is ignored for ``delete``. + + If the entity already exists, only properties referenced in + the mask are updated, others are left untouched. Properties + referenced in the mask but not in the entity are deleted. """ insert: entity.Entity = proto.Field( @@ -807,6 +841,11 @@ class Mutation(proto.Message): oneof="conflict_detection_strategy", message=timestamp_pb2.Timestamp, ) + property_mask: "PropertyMask" = proto.Field( + proto.MESSAGE, + number=9, + message="PropertyMask", + ) class MutationResult(proto.Message): @@ -866,6 +905,33 @@ class MutationResult(proto.Message): ) +class PropertyMask(proto.Message): + r"""The set of arbitrarily nested property paths used to restrict + an operation to only a subset of properties in an entity. + + Attributes: + paths (MutableSequence[str]): + The paths to the properties covered by this mask. + + A path is a list of property names separated by dots + (``.``), for example ``foo.bar`` means the property ``bar`` + inside the entity property ``foo`` inside the entity + associated with this path. + + If a property name contains a dot ``.`` or a backslash + ``\``, then that name must be escaped. + + A path must not be empty, and may not reference a value + inside an [array + value][google.datastore.v1.Value.array_value]. 
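The new `PropertyMask` message threads through lookups, queries, and mutations to limit which properties travel. A hedged usage sketch in plain-dict form (proto-plus messages accept dict initializers; the IDs and paths below are illustrative, and nothing is sent to the service):

# Request shape only; this does not contact the service.
lookup_request = {
    "project_id": "demo-project",  # illustrative project
    "keys": [{"path": [{"kind": "Task", "name": "task1"}]}],
    # Return only `description` and the nested `address.city`; per the new
    # docstring, the entity's key is always returned regardless of the mask.
    "property_mask": {"paths": ["description", "address.city"]},
}
# datastore_v1.types.LookupRequest(lookup_request) would coerce this dict,
# following the isinstance/coerce convention shown earlier in the patch.

On writes, the same mask narrows a `Mutation` to the listed paths: masked-in properties are updated, unlisted ones are left untouched, and masked-in paths absent from the entity are deleted. Paths may not be empty and may not reach inside array values, per the message docstring above.
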
+ """ + + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class ReadOptions(proto.Message): r"""The options shared by read requests. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index f0406904978a..661d509b47c3 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -42,11 +42,11 @@ class datastoreCallTransformer(cst.CSTTransformer): 'allocate_ids': ('project_id', 'keys', 'database_id', ), 'begin_transaction': ('project_id', 'database_id', 'transaction_options', ), 'commit': ('project_id', 'database_id', 'mode', 'transaction', 'single_use_transaction', 'mutations', ), - 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), + 'lookup': ('project_id', 'keys', 'database_id', 'read_options', 'property_mask', ), 'reserve_ids': ('project_id', 'keys', 'database_id', ), 'rollback': ('project_id', 'transaction', 'database_id', ), 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'explain_options', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'explain_options', ), + 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'property_mask', 'explain_options', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index c08b309ad23f..8e65052ceed6 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1176,6 +1176,9 @@ def test_export_entities_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.export_entities() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1200,6 +1203,9 @@ def test_export_entities_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.export_entities(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1209,6 +1215,45 @@ def test_export_entities_non_empty_request_with_auto_populated_field(): ) +def test_export_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_entities] = mock_rpc + request = {} + client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_export_entities_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1230,6 +1275,51 @@ async def test_export_entities_empty_call_async(): assert args[0] == datastore_admin.ExportEntitiesRequest() +@pytest.mark.asyncio +async def test_export_entities_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.export_entities + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.export_entities + ] = mock_object + + request = {} + await client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest @@ -1482,6 +1572,9 @@ def test_import_entities_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
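Each of the new `*_use_cached_wrapped_rpc` tests follows the same script: patch `wrap_method`, build a client, assert that all wrapping happened at construction, swap the cached entry for a mock, and check that two calls reuse it without re-wrapping. The same script distilled onto toy classes (not the generated client):

from unittest import mock

class ToyTransport:
    def __init__(self, wrapper):
        # Wrap once at construction, exactly like _prep_wrapped_messages.
        self._wrapped_methods = {self.export: wrapper(self.export)}

    def export(self, request):
        return "exported"

class ToyClient:
    def __init__(self, wrapper):
        self._transport = ToyTransport(wrapper)

    def export(self, request):
        # Per-call path is a dict lookup, never a fresh wrap.
        return self._transport._wrapped_methods[self._transport.export](request)

wrapper_fn = mock.Mock(side_effect=lambda f: f)
client = ToyClient(wrapper_fn)
assert wrapper_fn.call_count == 1  # wrapped at construction only
wrapper_fn.reset_mock()

mock_rpc = mock.Mock(return_value="stubbed")
client._transport._wrapped_methods[client._transport.export] = mock_rpc
client.export({})
client.export({})
assert wrapper_fn.call_count == 0  # no new wrapper for these calls
assert mock_rpc.call_count == 2
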
with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.import_entities() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1506,6 +1599,9 @@ def test_import_entities_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.import_entities(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1515,6 +1611,45 @@ def test_import_entities_non_empty_request_with_auto_populated_field(): ) +def test_import_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_entities] = mock_rpc + request = {} + client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_import_entities_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1536,6 +1671,51 @@ async def test_import_entities_empty_call_async(): assert args[0] == datastore_admin.ImportEntitiesRequest() +@pytest.mark.asyncio +async def test_import_entities_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.import_entities + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.import_entities + ] = mock_object + + request = {} + await client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. 
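The recurring `call.return_value.name = "foo"` additions exist because attributes of a plain `Mock` are themselves mocks, and `name` in particular cannot be preset through the `Mock(...)` constructor (there it configures the mock's repr instead). As the test comment notes, operation-handling code reads `operation.name` as a string, hence the explicit assignment. A quick demonstration:

from unittest import mock

call = mock.Mock()
print(type(call.return_value.name))  # <class 'unittest.mock.Mock'>, not str
call.return_value.name = "foo"       # what the updated tests now do
print(call().name)                   # foo
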
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest @@ -1788,6 +1968,9 @@ def test_create_index_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_index() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1811,6 +1994,9 @@ def test_create_index_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_index(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1819,6 +2005,45 @@ def test_create_index_non_empty_request_with_auto_populated_field(): ) +def test_create_index_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_index] = mock_rpc + request = {} + client.create_index(request) + + # Establish that the underlying gRPC stub method was called. 
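The async twins of these tests substitute `mock.AsyncMock` for `mock.Mock` because the cached wrapped method is awaited; a plain `Mock` would return a non-awaitable and the test would fail at the `await`. In miniature:

import asyncio
from unittest import mock

async def main():
    mock_object = mock.AsyncMock(return_value="ok")
    assert await mock_object({}) == "ok"  # awaitable, unlike mock.Mock()
    await mock_object({})
    assert mock_object.call_count == 2

asyncio.run(main())
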
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_index_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1840,6 +2065,51 @@ async def test_create_index_empty_call_async(): assert args[0] == datastore_admin.CreateIndexRequest() +@pytest.mark.asyncio +async def test_create_index_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_index + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_index + ] = mock_object + + request = {} + await client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest @@ -1980,6 +2250,9 @@ def test_delete_index_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_index() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2004,6 +2277,9 @@ def test_delete_index_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_index(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2013,6 +2289,45 @@ def test_delete_index_non_empty_request_with_auto_populated_field(): ) +def test_delete_index_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc + request = {} + client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_index_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2034,6 +2349,51 @@ async def test_delete_index_empty_call_async(): assert args[0] == datastore_admin.DeleteIndexRequest() +@pytest.mark.asyncio +async def test_delete_index_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_index + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_index + ] = mock_object + + request = {} + await client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest @@ -2187,6 +2547,9 @@ def test_get_index_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_index() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2211,6 +2574,9 @@ def test_get_index_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_index(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2220,6 +2586,41 @@ def test_get_index_non_empty_request_with_auto_populated_field(): ) +def test_get_index_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_index] = mock_rpc + request = {} + client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_index_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2247,6 +2648,45 @@ async def test_get_index_empty_call_async(): assert args[0] == datastore_admin.GetIndexRequest() +@pytest.mark.asyncio +async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_index + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_index + ] = mock_object + + request = {} + await client.get_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest @@ -2401,6 +2841,9 @@ def test_list_indexes_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_indexes() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2426,6 +2869,9 @@ def test_list_indexes_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_indexes(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2436,6 +2882,41 @@ def test_list_indexes_non_empty_request_with_auto_populated_field(): ) +def test_list_indexes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_indexes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc + request = {} + client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_indexes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_indexes_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2459,6 +2940,47 @@ async def test_list_indexes_empty_call_async(): assert args[0] == datastore_admin.ListIndexesRequest() +@pytest.mark.asyncio +async def test_list_indexes_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_indexes + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_indexes + ] = mock_object + + request = {} + await client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_indexes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest @@ -2596,13 +3118,13 @@ def test_list_indexes_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) pager = client.list_indexes(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2784,6 +3306,46 @@ def test_export_entities_rest(request_type): assert response.operation.name == "operations/spam" +def test_export_entities_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_entities] = mock_rpc + + request = {} + client.export_entities(request) + + # Establish that the underlying gRPC stub method was called. 
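The pager-test rename from `metadata` to `expected_metadata` is purely for clarity: the local tuple states an expectation about the pager, and reusing the name `metadata`, which elsewhere denotes the tuple actually sent with each RPC, invited confusion. The unchanged assertion, in miniature (toy pager; the real helper builds gRPC routing headers):

expected_metadata = tuple() + (
    ("x-goog-request-params", "project_id="),  # illustrative routing header
)

class ToyPager:
    def __init__(self, metadata):
        self._metadata = metadata

pager = ToyPager(metadata=expected_metadata)
assert pager._metadata == expected_metadata
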
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_export_entities_rest_required_fields( request_type=datastore_admin.ExportEntitiesRequest, ): @@ -3060,6 +3622,46 @@ def test_import_entities_rest(request_type): assert response.operation.name == "operations/spam" +def test_import_entities_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.import_entities] = mock_rpc + + request = {} + client.import_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_import_entities_rest_required_fields( request_type=datastore_admin.ImportEntitiesRequest, ): @@ -3411,6 +4013,46 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_create_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_index] = mock_rpc + + request = {} + client.create_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_index_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( @@ -3535,6 +4177,46 @@ def test_delete_index_rest(request_type): assert response.operation.name == "operations/spam" +def test_delete_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc + + request = {} + client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_index_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( @@ -3672,6 +4354,42 @@ def test_get_index_rest(request_type): assert response.state == index.Index.State.CREATING +def test_get_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_index in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_index] = mock_rpc + + request = {} + client.get_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_index(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_index_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( @@ -3797,6 +4515,42 @@ def test_list_indexes_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_indexes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_indexes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc + + request = {} + client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_indexes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_indexes_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 203d9c3af7f3..73f3d837a3f4 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1131,6 +1131,9 @@ def test_lookup_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.lookup() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1155,6 +1158,9 @@ def test_lookup_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.lookup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1164,6 +1170,41 @@ def test_lookup_non_empty_request_with_auto_populated_field(): ) +def test_lookup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.lookup] = mock_rpc + request = {} + client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lookup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_lookup_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1187,6 +1228,45 @@ async def test_lookup_empty_call_async(): assert args[0] == datastore.LookupRequest() +@pytest.mark.asyncio +async def test_lookup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.lookup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.lookup + ] = mock_object + + request = {} + await client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.lookup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest @@ -1447,6 +1527,9 @@ def test_run_query_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1471,6 +1554,9 @@ def test_run_query_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1480,6 +1566,41 @@ def test_run_query_non_empty_request_with_auto_populated_field(): ) +def test_run_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_query] = mock_rpc + request = {} + client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_run_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1503,6 +1624,45 @@ async def test_run_query_empty_call_async(): assert args[0] == datastore.RunQueryRequest() +@pytest.mark.asyncio +async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.run_query + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.run_query + ] = mock_object + + request = {} + await client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.run_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest @@ -1633,6 +1793,9 @@ def test_run_aggregation_query_empty_call(): with mock.patch.object( type(client.transport.run_aggregation_query), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.run_aggregation_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1659,6 +1822,9 @@ def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.run_aggregation_query), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_aggregation_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1668,6 +1834,46 @@ def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): ) +def test_run_aggregation_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.run_aggregation_query + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.run_aggregation_query + ] = mock_rpc + request = {} + client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_aggregation_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_run_aggregation_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1693,6 +1899,47 @@ async def test_run_aggregation_query_empty_call_async(): assert args[0] == datastore.RunAggregationQueryRequest() +@pytest.mark.asyncio +async def test_run_aggregation_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.run_aggregation_query + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.run_aggregation_query + ] = mock_object + + request = {} + await client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.run_aggregation_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_run_aggregation_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest @@ -1829,6 +2076,9 @@ def test_begin_transaction_empty_call(): with mock.patch.object( type(client.transport.begin_transaction), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.begin_transaction() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1855,6 +2105,9 @@ def test_begin_transaction_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.begin_transaction), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.begin_transaction(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1864,6 +2117,43 @@ def test_begin_transaction_non_empty_request_with_auto_populated_field(): ) +def test_begin_transaction_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.begin_transaction + ] = mock_rpc + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_begin_transaction_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1889,6 +2179,47 @@ async def test_begin_transaction_empty_call_async(): assert args[0] == datastore.BeginTransactionRequest() +@pytest.mark.asyncio +async def test_begin_transaction_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.begin_transaction + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.begin_transaction + ] = mock_object + + request = {} + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest @@ -2107,6 +2438,9 @@ def test_commit_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.commit() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2131,6 +2465,9 @@ def test_commit_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.commit(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2140,6 +2477,41 @@ def test_commit_non_empty_request_with_auto_populated_field(): ) +def test_commit_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_commit_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2163,6 +2535,45 @@ async def test_commit_empty_call_async(): assert args[0] == datastore.CommitRequest() +@pytest.mark.asyncio +async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.commit + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.commit + ] = mock_object + + request = {} + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest @@ -2450,6 +2861,9 @@ def test_rollback_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.rollback() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2474,6 +2888,9 @@ def test_rollback_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.rollback(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2483,6 +2900,41 @@ def test_rollback_non_empty_request_with_auto_populated_field(): ) +def test_rollback_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_rollback_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2504,6 +2956,45 @@ async def test_rollback_empty_call_async(): assert args[0] == datastore.RollbackRequest() +@pytest.mark.asyncio +async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.rollback + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.rollback + ] = mock_object + + request = {} + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest @@ -2716,6 +3207,9 @@ def test_allocate_ids_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.allocate_ids() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2740,6 +3234,9 @@ def test_allocate_ids_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
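[Editor's sketch] The `grpc_asyncio` variants in these hunks check the same caching through `google.api_core.gapic_v1.method_async.wrap_method`, replacing the cached entry with an `AsyncMock` and counting awaited calls. A self-contained asyncio analogue, again with hypothetical names:

    import asyncio
    from unittest import mock


    class _SketchAsyncTransport:
        def __init__(self):
            self._wrapped_methods = {"run_query": mock.AsyncMock(return_value="ok")}

        async def run_query(self, request):
            # Every call goes through the cached entry, so swapping that entry
            # for a mock observes all traffic for the RPC.
            return await self._wrapped_methods["run_query"](request)


    async def _demo():
        transport = _SketchAsyncTransport()
        await transport.run_query({})
        await transport.run_query({})
        assert transport._wrapped_methods["run_query"].await_count == 2


    asyncio.run(_demo())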
with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.allocate_ids(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2749,6 +3246,41 @@ def test_allocate_ids_non_empty_request_with_auto_populated_field(): ) +def test_allocate_ids_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.allocate_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.allocate_ids] = mock_rpc + request = {} + client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.allocate_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_allocate_ids_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2770,6 +3302,47 @@ async def test_allocate_ids_empty_call_async(): assert args[0] == datastore.AllocateIdsRequest() +@pytest.mark.asyncio +async def test_allocate_ids_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.allocate_ids + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.allocate_ids + ] = mock_object + + request = {} + await client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.allocate_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest @@ -3002,6 +3575,9 @@ def test_reserve_ids_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.reserve_ids() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3026,6 +3602,9 @@ def test_reserve_ids_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.reserve_ids(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3035,6 +3614,41 @@ def test_reserve_ids_non_empty_request_with_auto_populated_field(): ) +def test_reserve_ids_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reserve_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.reserve_ids] = mock_rpc + request = {} + client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.reserve_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_reserve_ids_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3056,6 +3670,47 @@ async def test_reserve_ids_empty_call_async(): assert args[0] == datastore.ReserveIdsRequest() +@pytest.mark.asyncio +async def test_reserve_ids_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.reserve_ids + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.reserve_ids + ] = mock_object + + request = {} + await client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.reserve_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest @@ -3285,6 +3940,42 @@ def test_lookup_rest(request_type): assert response.transaction == b"transaction_blob" +def test_lookup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.lookup] = mock_rpc + + request = {} + client.lookup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lookup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): transport_class = transports.DatastoreRestTransport @@ -3569,6 +4260,42 @@ def test_run_query_rest(request_type): assert response.transaction == b"transaction_blob" +def test_run_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_query] = mock_rpc + + request = {} + client.run_query(request) + + # Establish that the underlying gRPC stub method was called. 
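[Editor's sketch] The REST variants reuse the same swap-and-count step. A condensed standalone form, with a hypothetical `_SketchRestTransport`; the `return_value.name = "foo"` line mirrors the string the generated tests install, since (per the in-test comment about compute's `operation_request.operation`) shared test plumbing may read `.name` off the response and expects a string rather than an auto-created Mock attribute:

    from unittest import mock


    class _SketchRestTransport:
        def __init__(self):
            self.commit = object()  # stand-in for the bound RPC method
            self._wrapped_methods = {self.commit: lambda request: "real response"}


    transport = _SketchRestTransport()
    mock_rpc = mock.Mock()
    mock_rpc.return_value.name = "foo"  # a string, as the generated comment requires
    transport._wrapped_methods[transport.commit] = mock_rpc

    request = {}
    transport._wrapped_methods[transport.commit](request)
    assert mock_rpc.call_count == 1
    transport._wrapped_methods[transport.commit](request)
    assert mock_rpc.call_count == 2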
+ assert mock_rpc.call_count == 1 + + client.run_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): transport_class = transports.DatastoreRestTransport @@ -3774,6 +4501,47 @@ def test_run_aggregation_query_rest(request_type): assert response.transaction == b"transaction_blob" +def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.run_aggregation_query + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.run_aggregation_query + ] = mock_rpc + + request = {} + client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_aggregation_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_run_aggregation_query_rest_required_fields( request_type=datastore.RunAggregationQueryRequest, ): @@ -3983,6 +4751,44 @@ def test_begin_transaction_rest(request_type): assert response.transaction == b"transaction_blob" +def test_begin_transaction_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.begin_transaction + ] = mock_rpc + + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_begin_transaction_rest_required_fields( request_type=datastore.BeginTransactionRequest, ): @@ -4248,6 +5054,42 @@ def test_commit_rest(request_type): assert response.index_updates == 1389 +def test_commit_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_commit_rest_required_fields(request_type=datastore.CommitRequest): transport_class = transports.DatastoreRestTransport @@ -4530,6 +5372,42 @@ def test_rollback_rest(request_type): assert isinstance(response, datastore.RollbackResponse) +def test_rollback_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): transport_class = transports.DatastoreRestTransport @@ -4801,6 +5679,42 @@ def test_allocate_ids_rest(request_type): assert isinstance(response, datastore.AllocateIdsResponse) +def test_allocate_ids_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.allocate_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.allocate_ids] = mock_rpc + + request = {} + client.allocate_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.allocate_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsRequest): transport_class = transports.DatastoreRestTransport @@ -5076,6 +5990,42 @@ def test_reserve_ids_rest(request_type): assert isinstance(response, datastore.ReserveIdsResponse) +def test_reserve_ids_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reserve_ids in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.reserve_ids] = mock_rpc + + request = {} + client.reserve_ids(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.reserve_ids(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsRequest): transport_class = transports.DatastoreRestTransport From 215c5637440a33ffc21d0875611dbbd2a306a7ac Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 29 Jul 2024 08:14:15 -0600 Subject: [PATCH 576/611] chore: update mypy target (#554) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update mypy package target * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-datastore/noxfile.py | 2 +- packages/google-cloud-datastore/owlbot.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index e4e112d5531d..a3084c382c05 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -143,7 +143,7 @@ def mypy(session): "types-protobuf!=4.24.0.20240106", "types-requests", ) - session.run("mypy", "-p", "google") + session.run("mypy", "-p", "google.cloud.datastore") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 82565aa3c809..3b8149e65669 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -285,7 +285,7 @@ def mypy(session): session.install( "mypy", "types-setuptools", "types-mock", "types-protobuf!=4.24.0.20240106", "types-requests" ) - session.run("mypy", "-p", "google") + session.run("mypy", "-p", "google.cloud.datastore") @nox.session(python=DEFAULT_PYTHON_VERSION) From 88ddb9489b98c26b3243816b28f96b95a9ae6691 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 29 Jul 2024 13:47:43 -0400 Subject: [PATCH 577/611] chore: update templated files (#550) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update templated files * remove obsolete replacements in owlbot.py * update replacements in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- packages/google-cloud-datastore/.flake8 | 2 +- .../.github/.OwlBot.lock.yaml | 3 +- .../.github/auto-label.yaml | 2 +- .../google-cloud-datastore/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 23 +- .../.kokoro/docker/docs/requirements.txt | 40 +- .../.kokoro/populate-secrets.sh | 2 +- .../.kokoro/publish-docs.sh | 2 +- .../google-cloud-datastore/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 509 +++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- .../.kokoro/test-samples.sh | 2 +- .../.kokoro/trampoline.sh | 2 +- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-datastore/.trampolinerc | 2 +- packages/google-cloud-datastore/MANIFEST.in | 2 +- 
packages/google-cloud-datastore/docs/conf.py | 2 +- packages/google-cloud-datastore/noxfile.py | 61 ++- packages/google-cloud-datastore/owlbot.py | 31 +- .../scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 2 +- 23 files changed, 377 insertions(+), 324 deletions(-) diff --git a/packages/google-cloud-datastore/.flake8 b/packages/google-cloud-datastore/.flake8 index 87f6e408c47d..32986c79287a 100644 --- a/packages/google-cloud-datastore/.flake8 +++ b/packages/google-cloud-datastore/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 81f87c56917d..001b1b1cabec 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e diff --git a/packages/google-cloud-datastore/.github/auto-label.yaml b/packages/google-cloud-datastore/.github/auto-label.yaml index 8b37ee89711f..21786a4eb085 100644 --- a/packages/google-cloud-datastore/.github/auto-label.yaml +++ b/packages/google-cloud-datastore/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index f9800c921fa5..e0cd71b03bd2 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index bdaf39fe22d0..5205308b334d 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt index 0e5d70f20f83..7129c7715594 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via 
nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh index 6f3972140e80..c435402f473e 100755 --- a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh +++ b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index 9eafe0be3bba..38f083f05aa0 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index c236e3cf0913..b460d5a03693 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
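[Editor's sketch] The regenerated `.kokoro/requirements.txt` in the next hunk is produced with hash pinning; its own header records the command (`pip-compile --allow-unsafe --generate-hashes requirements.in`). A sketch of a nox session that would reproduce such a file — the session name is hypothetical, and the repo's templating may drive this step differently:

    import nox


    @nox.session
    def pin_deps(session):
        session.install("pip-tools")
        # Matches the invocation recorded in the generated file's header.
        session.run(
            "pip-compile", "--allow-unsafe", "--generate-hashes", "requirements.in"
        )

`--generate-hashes` is what produces the `--hash=sha256:...` lines below, so pip can verify every downloaded wheel against a pinned digest.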
diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt index 51f92b8e12f1..9622baf0ba38 100644 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - 
--hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + 
--hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + 
--hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 
\ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - 
--hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + 
--hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + 
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + 
--hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + 
--hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 
# via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + 
--hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh index 63ac41dfae1d..e9d8bd79a644 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh index 5a0f5fab6a89..55910c8ba178 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh index 50b35a48c190..7933d820149a 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/trampoline.sh b/packages/google-cloud-datastore/.kokoro/trampoline.sh index d85b1f267693..48f79699706e 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh index 59a7cf3a9373..35fa529231dc 100755 --- a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/.pre-commit-config.yaml b/packages/google-cloud-datastore/.pre-commit-config.yaml index 6a8e16950664..1d74695f70b6 100644 --- a/packages/google-cloud-datastore/.pre-commit-config.yaml +++ b/packages/google-cloud-datastore/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/.trampolinerc b/packages/google-cloud-datastore/.trampolinerc index a7dfeb42c6d0..0080152373d5 100644 --- a/packages/google-cloud-datastore/.trampolinerc +++ b/packages/google-cloud-datastore/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/MANIFEST.in b/packages/google-cloud-datastore/MANIFEST.in index e0a66705318e..d6814cd60037 100644 --- a/packages/google-cloud-datastore/MANIFEST.in +++ b/packages/google-cloud-datastore/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/docs/conf.py b/packages/google-cloud-datastore/docs/conf.py index 3e814f57e6f1..8122be7156d8 100644 --- a/packages/google-cloud-datastore/docs/conf.py +++ b/packages/google-cloud-datastore/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index a3084c382c05..4a08c70ff100 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -181,14 +181,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. 
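+    # PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION is protobuf's standard runtime
+    # selector; passing it via ``env`` below makes each parametrized session
+    # exercise the "python", "upb", or "cpp" implementation.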
session.run( "py.test", @@ -202,15 +216,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -304,7 +315,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" @@ -396,10 +407,17 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -434,9 +452,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -462,12 +480,17 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") - env = {} # Only run system tests if found. if os.path.exists(system_test_path): session.run( @@ -475,8 +498,10 @@ def prerelease_deps(session): "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, - env=env, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -484,6 +509,8 @@ def prerelease_deps(session): "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, - env=env, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 3b8149e65669..9fcf0e1583e9 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -137,33 +137,30 @@ def system(session, disable_grpc): if disable_grpc: env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True" -# Run py.test against the system tests. + # Run py.test against the system tests. """, ) assert 1 == s.replace( "noxfile.py", - """\ - # Only run system tests if found. - """, - """\ - env = {} - # Only run system tests if found. 
- """, -) - -assert 2 == s.replace( - "noxfile.py", - """system_test_path,\n""", """system_test_path, - env=env,\n""", + \*session.posargs, + \)""", + """system_test_path, + env=env, + *session.posargs, + )""", ) -assert 2 == s.replace( +assert 1 == s.replace( "noxfile.py", - """system_test_folder_path,\n""", """system_test_folder_path, - env=env,\n""", + \*session.posargs, + \)""", + """system_test_folder_path, + env=env, + *session.posargs, + )""", ) # Add nox session to exercise doctests diff --git a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh index 0018b421ddf8..120b0ddc4364 100755 --- a/packages/google-cloud-datastore/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-datastore/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py index 1acc119835b5..8f5e248a0da1 100644 --- a/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-datastore/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 39a6f89a01f2b52cff696ab867254a7156300332 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 30 Jul 2024 13:08:14 -0400 Subject: [PATCH 578/611] fix: Using end_cursor instead of skipped_cursor in Iterator to fix rare bug. (#552) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Using end_cursor instead of skipped_cursor in Iterator to fix rare bug. 
* 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .../google/cloud/datastore/query.py | 2 +- .../tests/unit/test_query.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index 72d6fe51b0d3..c6d2785541af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -825,7 +825,7 @@ def _next_page(self): old_query_pb = query_pb query_pb = query_pb2.Query() query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability - query_pb.start_cursor = response_pb.batch.skipped_cursor + query_pb.start_cursor = response_pb.batch.end_cursor query_pb.offset -= response_pb.batch.skipped_results request = { diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py index 6c2063bbe244..fa7d63dce8e4 100644 --- a/packages/google-cloud-datastore/tests/unit/test_query.py +++ b/packages/google-cloud-datastore/tests/unit/test_query.py @@ -1019,7 +1019,8 @@ def test_iterator__next_page_no_more(database_id): @pytest.mark.parametrize("database_id", [None, "somedb"]) -def test_iterator__next_page_w_skipped_lt_offset(database_id): +@pytest.mark.parametrize("skipped_cursor_1", [b"DEADBEEF", b""]) +def test_iterator__next_page_w_skipped_lt_offset(skipped_cursor_1, database_id): from google.api_core import page_iterator from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -1028,16 +1029,17 @@ def test_iterator__next_page_w_skipped_lt_offset(database_id): project = "prujekt" skipped_1 = 100 - skipped_cursor_1 = b"DEADBEEF" + end_cursor_1 = b"DEADBEEF" skipped_2 = 50 - skipped_cursor_2 = b"FACEDACE" + end_cursor_2 = b"FACEDACE" more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED result_1 = _make_query_response([], b"", more_enum, skipped_1) result_1.batch.skipped_cursor = skipped_cursor_1 + result_1.batch.end_cursor = end_cursor_1 result_2 = _make_query_response([], b"", more_enum, skipped_2) - result_2.batch.skipped_cursor = skipped_cursor_2 + result_2.batch.end_cursor = end_cursor_2 ds_api = _make_datastore_api(result_1, result_2) client = _Client(project, datastore_api=ds_api, database=database_id) @@ -1055,9 +1057,7 @@ def test_iterator__next_page_w_skipped_lt_offset(database_id): read_options = datastore_pb2.ReadOptions() query_1 = query_pb2.Query(offset=offset) - query_2 = query_pb2.Query( - start_cursor=skipped_cursor_1, offset=(offset - skipped_1) - ) + query_2 = query_pb2.Query(start_cursor=end_cursor_1, offset=(offset - skipped_1)) expected_calls = [] for query in [query_1, query_2]: expected_request = { From e515e493e999796f25566b0e7780b3657b9ec7a6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:12:48 -0400 Subject: [PATCH 579/611] fix: Retry and timeout values do not propagate in requests during pagination (#555) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: 
https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../services/datastore_admin/async_client.py | 2 + .../services/datastore_admin/client.py | 2 + .../services/datastore_admin/pagers.py | 41 ++++++++++++++++++- .../test_datastore_admin.py | 7 +++- .../unit/gapic/datastore_v1/test_datastore.py | 1 + 5 files changed, 50 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 4b7b0c8d854b..80fb6bbf9e3a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1093,6 +1093,8 @@ async def sample_list_indexes(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index e6f35ba3f0ce..ce21fdbd0faa 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1476,6 +1476,8 @@ def sample_list_indexes(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index eb4bd0dcff0a..dc61026b6192 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index @@ -52,6 +65,8 @@ def __init__( request: datastore_admin.ListIndexesRequest, response: datastore_admin.ListIndexesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -63,12 +78,17 @@ def __init__( The initial request object. response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = datastore_admin.ListIndexesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -79,7 +99,12 @@ def pages(self) -> Iterator[datastore_admin.ListIndexesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[index.Index]: @@ -114,6 +139,8 @@ def __init__( request: datastore_admin.ListIndexesRequest, response: datastore_admin.ListIndexesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -125,12 +152,17 @@ def __init__( The initial request object. response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = datastore_admin.ListIndexesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -141,7 +173,12 @@ async def pages(self) -> AsyncIterator[datastore_admin.ListIndexesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[index.Index]: diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 8e65052ceed6..859224252e86 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -47,6 +47,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datastore_admin_v1.services.datastore_admin import ( @@ -3119,12 +3120,16 @@ def test_list_indexes_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), ) - pager = client.list_indexes(request={}) + pager = client.list_indexes(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 73f3d837a3f4..65289ad3ba9f 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient From bb0acaf10126dafa35a4b8047da6fe284b1b5541 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 7 Aug 2024 10:09:18 -0400 Subject: [PATCH 580/611] test: Added system test for query offset issue (#557) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: Added system test for query offset issue * linting * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fixed test * Removed testing the default database --------- Co-authored-by: Owl Bot --- .../tests/system/test_query.py | 34 +++++++ 
.../tests/system/utils/clear_datastore.py | 10 +- .../tests/system/utils/populate_datastore.py | 93 ++++++++++++++++++- 3 files changed, 132 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index 9f902205e9db..b9574789a8f2 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -337,6 +337,17 @@ def large_query_client(datastore_client): return large_query_client +@pytest.fixture(scope="session") +def mergejoin_query_client(datastore_client): + mergejoin_query_client = _helpers.clone_client( + datastore_client, + namespace=populate_datastore.MERGEJOIN_DATASET_NAMESPACE, + ) + populate_datastore.add_mergejoin_dataset_entities(client=mergejoin_query_client) + + return mergejoin_query_client + + @pytest.fixture(scope="function") def large_query(large_query_client): # Use the client for this test instead of the global. @@ -346,6 +357,15 @@ def large_query(large_query_client): ) +@pytest.fixture(scope="function") +def mergejoin_query(mergejoin_query_client): + # Use the client for this test instead of the global. + return mergejoin_query_client.query( + kind=populate_datastore.MERGEJOIN_DATASET_KIND, + namespace=populate_datastore.MERGEJOIN_DATASET_NAMESPACE, + ) + + @pytest.mark.parametrize( "limit,offset,expected", [ @@ -385,6 +405,20 @@ def test_large_query(large_query, limit, offset, expected, database_id): assert len(entities) == expected +@pytest.mark.parametrize("database_id", [_helpers.TEST_DATABASE], indirect=True) +def test_mergejoin_query(mergejoin_query, database_id): + query = mergejoin_query + query.add_filter(filter=PropertyFilter("a", "=", 1)) + query.add_filter(filter=PropertyFilter("b", "=", 1)) + + # There should be 2 * MERGEJOIN_QUERY_NUM_RESULTS results total + expected_total = 2 * populate_datastore.MERGEJOIN_QUERY_NUM_RESULTS + for offset in range(0, expected_total + 1): + iterator = query.fetch(offset=offset) + num_entities = len([e for e in iterator]) + assert num_entities == expected_total - offset + + @pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) def test_query_add_property_filter(ancestor_query, database_id): query = ancestor_query diff --git a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py index 2082bce76363..05a63b319500 100644 --- a/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/clear_datastore.py @@ -31,6 +31,8 @@ "Post", "uuid_key", "timestamp_key", + "LargeCharacter", + "Mergejoin", ) TRANSACTION_MAX_GROUPS = 5 MAX_DEL_ENTITIES = 500 @@ -90,12 +92,10 @@ def remove_all_entities(client): def run(database): - client = datastore.Client(database=database) kinds = sys.argv[1:] if len(kinds) == 0: kinds = ALL_KINDS - print_func( "This command will remove all entities from the database " + database @@ -105,8 +105,10 @@ def run(database): response = input("Is this OK [y/n]? 
") if response.lower() == "y": - for kind in kinds: - remove_kind(kind, client) + for namespace in ["", "LargeCharacterEntity", "MergejoinNamespace"]: + client = datastore.Client(database=database, namespace=namespace) + for kind in kinds: + remove_kind(kind, client) else: print_func("Doing nothing.") diff --git a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py index 9077241f76f5..0eea15fb0eac 100644 --- a/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py +++ b/packages/google-cloud-datastore/tests/system/utils/populate_datastore.py @@ -58,6 +58,11 @@ LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity" LARGE_CHARACTER_KIND = "LargeCharacter" +MERGEJOIN_QUERY_NUM_RESULTS = 7 +MERGEJOIN_DATASET_INTERMEDIATE_OBJECTS = 20000 +MERGEJOIN_DATASET_NAMESPACE = "MergejoinNamespace" +MERGEJOIN_DATASET_KIND = "Mergejoin" + def get_system_test_db(): return os.getenv("SYSTEM_TESTS_DATABASE") or "system-tests-named-db" @@ -179,12 +184,92 @@ def add_timestamp_keys(client=None): batch.put(entity) +def add_mergejoin_dataset_entities(client=None): + """ + Dataset to account for one bug that was seen in https://github.com/googleapis/python-datastore/issues/547 + The root cause of this is us setting a subsequent query's start_cursor to skipped_cursor instead of end_cursor. + In niche scenarios involving mergejoins, skipped_cursor becomes empty and the query starts back from the beginning, + returning duplicate items. + + This bug is able to be reproduced with a dataset shown in b/352377540, with 7 items of a=1, b=1 + followed by 20k items of alternating a=1, b=0 and a=0, b=1, then 7 more a=1, b=1, then querying for all + items with a=1, b=1 and an offset of 8. + """ + client.namespace = MERGEJOIN_DATASET_NAMESPACE + + # Query used for all tests + page_query = client.query( + kind=MERGEJOIN_DATASET_KIND, namespace=MERGEJOIN_DATASET_NAMESPACE + ) + + def create_entity(id, a, b): + key = client.key(MERGEJOIN_DATASET_KIND, id) + entity = datastore.Entity(key=key) + entity["a"] = a + entity["b"] = b + return entity + + def put_objects(count): + id = 1 + curr_intermediate_entries = 0 + + # Can only do 500 operations in a transaction with an overall + # size limit. + ENTITIES_TO_BATCH = 500 + + with client.transaction() as xact: + for _ in range(0, MERGEJOIN_QUERY_NUM_RESULTS): + entity = create_entity(id, 1, 1) + id += 1 + xact.put(entity) + + while curr_intermediate_entries < count - MERGEJOIN_QUERY_NUM_RESULTS: + start = curr_intermediate_entries + end = min(curr_intermediate_entries + ENTITIES_TO_BATCH, count) + with client.transaction() as xact: + # The name/ID for the new entity + for i in range(start, end): + if id % 2: + entity = create_entity(id, 0, 1) + else: + entity = create_entity(id, 1, 0) + id += 1 + + # Saves the entity + xact.put(entity) + curr_intermediate_entries += ENTITIES_TO_BATCH + + with client.transaction() as xact: + for _ in range(0, MERGEJOIN_QUERY_NUM_RESULTS): + entity = create_entity(id, 1, 1) + id += 1 + xact.put(entity) + + # If anything exists in this namespace, delete it, since we need to + # set up something very specific. 
+ all_entities = [e for e in page_query.fetch()] + if len(all_entities) > 0: + # Cleanup Collection if not an exact match + while all_entities: + entities = all_entities[:500] + all_entities = all_entities[500:] + client.delete_multi([e.key for e in entities]) + # Put objects + put_objects(MERGEJOIN_DATASET_INTERMEDIATE_OBJECTS) + + def run(database): client = datastore.Client(database=database) flags = sys.argv[1:] if len(flags) == 0: - flags = ["--characters", "--uuid", "--timestamps"] + flags = [ + "--characters", + "--uuid", + "--timestamps", + "--large-characters", + "--mergejoin", + ] if "--characters" in flags: add_characters(client) @@ -195,6 +280,12 @@ def run(database): if "--timestamps" in flags: add_timestamp_keys(client) + if "--large-characters" in flags: + add_large_character_entities(client) + + if "--mergejoin" in flags: + add_mergejoin_dataset_entities(client) + def main(): for database in ["", get_system_test_db()]: From 9ffb9a25436fecf29050b5cd85d682f5922471cb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 7 Aug 2024 14:57:47 -0600 Subject: [PATCH 581/611] feat: implement query profiling (#542) --- .../google/cloud/datastore/__init__.py | 12 +- .../google/cloud/datastore/aggregation.py | 99 ++++++--- .../google/cloud/datastore/client.py | 4 +- .../google/cloud/datastore/query.py | 93 ++++++--- .../google/cloud/datastore/query_profile.py | 143 +++++++++++++ .../samples/snippets/snippets.py | 110 +++++++++- .../samples/snippets/snippets_test.py | 72 +++++-- .../tests/system/test_aggregation_query.py | 141 +++++++++++++ .../tests/system/test_query.py | 124 ++++++++++- .../tests/unit/test_aggregation.py | 193 +++++++++++++++++- .../tests/unit/test_query.py | 153 ++++++++++++++ .../tests/unit/test_query_profile.py | 126 ++++++++++++ 12 files changed, 1170 insertions(+), 100 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore/query_profile.py create mode 100644 packages/google-cloud-datastore/tests/unit/test_query_profile.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py index b2b4c1724b31..d6fa310bea34 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/__init__.py @@ -61,6 +61,16 @@ from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key from google.cloud.datastore.query import Query +from google.cloud.datastore.query_profile import ExplainOptions from google.cloud.datastore.transaction import Transaction -__all__ = ["__version__", "Batch", "Client", "Entity", "Key", "Query", "Transaction"] +__all__ = [ + "__version__", + "Batch", + "Client", + "Entity", + "Key", + "Query", + "ExplainOptions", + "Transaction", +] diff --git a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py index 1384f33206b9..dcb812365dd1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/aggregation.py @@ -23,15 +23,11 @@ from google.cloud.datastore import helpers from google.cloud.datastore.query import _pb_from_query +from google.cloud.datastore.query_profile import ExplainMetrics +from google.cloud.datastore.query_profile import QueryExplainError -_NOT_FINISHED = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED -_NO_MORE_RESULTS = 
query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS - -_FINISHED = ( - _NO_MORE_RESULTS, - query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT, - query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_CURSOR, -) +from google.cloud.datastore.query import _NOT_FINISHED +from google.cloud.datastore.query import _FINISHED class BaseAggregation(ABC): @@ -159,16 +155,25 @@ class AggregationQuery(object): :type query: :class:`google.cloud.datastore.query.Query` :param query: The query used for aggregations. + + :type explain_options: :class:`~google.cloud.datastore.ExplainOptions` + :param explain_options: (Optional) Options to enable query profiling for + this query. When set, explain_metrics will be available on the iterator + returned by query.fetch(). + If not passed, will use value from given query. """ def __init__( self, client, query, + explain_options=None, ): self._client = client self._nested_query = query self._aggregations = [] + # fallback to query._explain_options if not set + self._explain_options = explain_options or query._explain_options @property def project(self): @@ -391,6 +396,7 @@ def __init__( self._read_time = read_time self._limit = limit # The attributes below will change over the life of the iterator. + self._explain_metrics = None self._more_results = True def _build_protobuf(self): @@ -441,7 +447,6 @@ def _next_page(self): if not self._more_results: return None - query_pb = self._build_protobuf() transaction_id, new_transaction_options = helpers.get_transaction_options( self.client.current_transaction ) @@ -466,38 +471,68 @@ def _next_page(self): "project_id": self._aggregation_query.project, "partition_id": partition_id, "read_options": read_options, - "aggregation_query": query_pb, + "aggregation_query": self._build_protobuf(), } + if self._aggregation_query._explain_options: + request[ + "explain_options" + ] = self._aggregation_query._explain_options._to_dict() helpers.set_database_id_to_request(request, self.client.database) - response_pb = self.client._datastore_api.run_aggregation_query( - request=request, - **kwargs, - ) - while response_pb.batch.more_results == _NOT_FINISHED: - # We haven't finished processing. A likely reason is we haven't - # skipped all of the results yet. Don't return any results. - # Instead, rerun query, adjusting offsets. Datastore doesn't process - # more than 1000 skipped results in a query. - old_query_pb = query_pb - query_pb = query_pb2.AggregationQuery() - query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability - - request = { - "project_id": self._aggregation_query.project, - "partition_id": partition_id, - "read_options": read_options, - "aggregation_query": query_pb, - } - helpers.set_database_id_to_request(request, self.client.database) + response_pb = None + + while response_pb is None or response_pb.batch.more_results == _NOT_FINISHED: + if response_pb is not None: + # We haven't finished processing. A likely reason is we haven't + # skipped all of the results yet. Don't return any results. + # Instead, rerun query, adjusting offsets. Datastore doesn't process + # more than 1000 skipped results in a query. 
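+                # (illustrative note, not part of the original change: a
+                # COUNT over a very large kind may return NOT_FINISHED several
+                # times; each pass reuses the request dict with a fresh copy
+                # of the query protobuf before re-issuing the RPC)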
+ new_query_pb = query_pb2.AggregationQuery() + new_query_pb._pb.CopyFrom( + request["aggregation_query"]._pb + ) # copy for testability + request["aggregation_query"] = new_query_pb + response_pb = self.client._datastore_api.run_aggregation_query( - request=request, - **kwargs, + request=request.copy(), **kwargs ) + # capture explain metrics if present in response + # should only be present in last response, and only if explain_options was set + if response_pb.explain_metrics: + self._explain_metrics = ExplainMetrics._from_pb( + response_pb.explain_metrics + ) item_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, item_pbs, self.item_to_value) + @property + def explain_metrics(self) -> ExplainMetrics: + """ + Get the metrics associated with the query execution. + Metrics are only available when explain_options is set on the query. If + ExplainOptions.analyze is False, only plan_summary is available. If it is + True, execution_stats is also available. + + :rtype: :class:`~google.cloud.datastore.query_profile.ExplainMetrics` + :returns: The metrics associated with the query execution. + :raises: :class:`~google.cloud.datastore.query_profile.QueryExplainError` + if explain_metrics is not available on the query. + """ + if self._explain_metrics is not None: + return self._explain_metrics + elif self._aggregation_query._explain_options is None: + raise QueryExplainError("explain_options not set on query.") + elif self._aggregation_query._explain_options.analyze is False: + # we need to run the query to get the explain_metrics + # analyze=False only returns explain_metrics, no results + self._next_page() + if self._explain_metrics is not None: + return self._explain_metrics + raise QueryExplainError( + "explain_metrics not available until query is complete." + ) + # pylint: disable=unused-argument def _item_to_aggregation_result(iterator, pb): diff --git a/packages/google-cloud-datastore/google/cloud/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore/client.py index b1e79d91a818..ca3d4e0c1177 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/client.py @@ -875,7 +875,7 @@ def do_something_with(entity): kwargs["namespace"] = self.namespace return Query(self, **kwargs) - def aggregation_query(self, query): + def aggregation_query(self, query, **kwargs): """Proxy to :class:`google.cloud.datastore.aggregation.AggregationQuery`. Using aggregation_query to count over a query: @@ -953,7 +953,7 @@ def do_something_with(entity): :rtype: :class:`~google.cloud.datastore.aggregation.AggregationQuery` :returns: An AggregationQuery object. """ - return AggregationQuery(self, query) + return AggregationQuery(self, query, **kwargs) def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None): """Reserve a list of IDs sequentially from a complete key. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query.py b/packages/google-cloud-datastore/google/cloud/datastore/query.py index c6d2785541af..5ff273663bc2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/query.py @@ -13,20 +13,21 @@ # limitations under the License. 
"""Create / interact with Google Cloud Datastore queries.""" - import base64 import warnings - from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list - from google.cloud.datastore_v1.types import entity as entity_pb2 from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key + +from google.cloud.datastore.query_profile import ExplainMetrics +from google.cloud.datastore.query_profile import QueryExplainError + import abc from abc import ABC @@ -38,6 +39,7 @@ _NO_MORE_RESULTS, query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT, query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_CURSOR, + query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_TYPE_UNSPECIFIED, # received when explain_options(analyze=False) ) KEY_PROPERTY_NAME = "__key__" @@ -176,6 +178,11 @@ class Query(object): :type distinct_on: sequence of string :param distinct_on: field names used to group query results. + :type explain_options: :class:`~google.cloud.datastore.ExplainOptions` + :param explain_options: (Optional) Options to enable query profiling for + this query. When set, explain_metrics will be available on the iterator + returned by query.fetch(). + :raises: ValueError if ``project`` is not passed and no implicit default is set. """ @@ -203,6 +210,7 @@ def __init__( projection=(), order=(), distinct_on=(), + explain_options=None, ): self._client = client self._kind = kind @@ -221,6 +229,7 @@ def __init__( else: self._namespace = None + self._explain_options = explain_options self._ancestor = ancestor self._filters = [] @@ -704,6 +713,7 @@ def __init__( self._timeout = timeout self._read_time = read_time # The attributes below will change over the life of the iterator. + self._explain_metrics = None self._more_results = True self._skipped_results = 0 @@ -777,7 +787,6 @@ def _next_page(self): if not self._more_results: return None - query_pb = self._build_protobuf() new_transaction_options = None transaction_id, new_transaction_options = helpers.get_transaction_options( self.client.current_transaction @@ -804,46 +813,70 @@ def _next_page(self): "project_id": self._query.project, "partition_id": partition_id, "read_options": read_options, - "query": query_pb, + "query": self._build_protobuf(), } + if self._query._explain_options: + request["explain_options"] = self._query._explain_options._to_dict() helpers.set_database_id_to_request(request, self.client.database) - response_pb = self.client._datastore_api.run_query( - request=request, - **kwargs, - ) + response_pb = None - while ( + while response_pb is None or ( response_pb.batch.more_results == _NOT_FINISHED - and response_pb.batch.skipped_results < query_pb.offset + and response_pb.batch.skipped_results < request["query"].offset ): - # We haven't finished processing. A likely reason is we haven't - # skipped all of the results yet. Don't return any results. - # Instead, rerun query, adjusting offsets. Datastore doesn't process - # more than 1000 skipped results in a query. 
- old_query_pb = query_pb - query_pb = query_pb2.Query() - query_pb._pb.CopyFrom(old_query_pb._pb) # copy for testability - query_pb.start_cursor = response_pb.batch.end_cursor - query_pb.offset -= response_pb.batch.skipped_results - - request = { - "project_id": self._query.project, - "partition_id": partition_id, - "read_options": read_options, - "query": query_pb, - } - helpers.set_database_id_to_request(request, self.client.database) + if response_pb is not None: + # We haven't finished processing. A likely reason is we haven't + # skipped all of the results yet. Don't return any results. + # Instead, rerun query, adjusting offsets. Datastore doesn't process + # more than 1000 skipped results in a query. + new_query_pb = query_pb2.Query() + new_query_pb._pb.CopyFrom(request["query"]._pb) # copy for testability + new_query_pb.start_cursor = response_pb.batch.end_cursor + new_query_pb.offset -= response_pb.batch.skipped_results + request["query"] = new_query_pb response_pb = self.client._datastore_api.run_query( - request=request, - **kwargs, + request=request.copy(), **kwargs ) + # capture explain metrics if present in response + # should only be present in last response, and only if explain_options was set + if response_pb and response_pb.explain_metrics: + self._explain_metrics = ExplainMetrics._from_pb( + response_pb.explain_metrics + ) entity_pbs = self._process_query_results(response_pb) return page_iterator.Page(self, entity_pbs, self.item_to_value) + @property + def explain_metrics(self) -> ExplainMetrics: + """ + Get the metrics associated with the query execution. + Metrics are only available when explain_options is set on the query. If + ExplainOptions.analyze is False, only plan_summary is available. If it is + True, execution_stats is also available. + + :rtype: :class:`~google.cloud.datastore.query_profile.ExplainMetrics` + :returns: The metrics associated with the query execution. + :raises: :class:`~google.cloud.datastore.query_profile.QueryExplainError` + if explain_metrics is not available on the query. + """ + if self._explain_metrics is not None: + return self._explain_metrics + elif self._query._explain_options is None: + raise QueryExplainError("explain_options not set on query.") + elif self._query._explain_options.analyze is False: + # we need to run the query to get the explain_metrics + # analyze=False only returns explain_metrics, no results + self._next_page() + if self._explain_metrics is not None: + return self._explain_metrics + raise QueryExplainError( + "explain_metrics not available until query is complete." + ) + def _pb_from_query(query): """Convert a Query instance to the corresponding protobuf. diff --git a/packages/google-cloud-datastore/google/cloud/datastore/query_profile.py b/packages/google-cloud-datastore/google/cloud/datastore/query_profile.py new file mode 100644 index 000000000000..35c31cb3dada --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore/query_profile.py @@ -0,0 +1,143 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from typing import Any + +import datetime + +from dataclasses import dataclass +from google.protobuf.json_format import MessageToDict + + +@dataclass(frozen=True) +class ExplainOptions: + """ + Class used to configure query profiling on a query. + Set on a query object using the explain_options attribute at query construction time. + + :type analyze: bool + :param analyze: Whether to execute this query. When false (the default), + the query will be planned, returning only metrics from the planning stages. + When true, the query will be planned and executed, returning the full + query results along with both planning and execution stage metrics. + """ + + analyze: bool = False + + def _to_dict(self): + return {"analyze": self.analyze} + + +@dataclass(frozen=True) +class PlanSummary: + """ + Contains planning phase information about a query. + + :type indexes_used: list[dict[str, Any]] + :param indexes_used: The indexes selected for this query. + """ + + indexes_used: list[dict[str, Any]] + + +@dataclass(frozen=True) +class ExecutionStats: + """ + Execution phase information about a query. + + Only available when explain_options.analyze is True. + + :type results_returned: int + :param results_returned: Total number of results returned, including + documents, projections, aggregation results, keys. + :type execution_duration: datetime.timedelta + :param execution_duration: Total time to execute the query in the backend. + :type read_operations: int + :param read_operations: Total billable read operations. + :type debug_stats: dict[str, Any] + :param debug_stats: Debugging statistics from the execution of the query. + Note that the debugging stats are subject to change as Firestore evolves + """ + + results_returned: int + execution_duration: datetime.timedelta + read_operations: int + debug_stats: dict[str, Any] + + +@dataclass(frozen=True) +class ExplainMetrics: + """ + ExplainMetrics contains information about the planning and execution of a query. + + When explain_options.analyze is false, only plan_summary is available. + When explain_options.analyze is true, execution_stats is also available. + + :type plan_summary: PlanSummary + :param plan_summary: Planning phase information about the query. + :type execution_stats: ExecutionStats + :param execution_stats: Execution phase information about the query. + """ + + plan_summary: PlanSummary + + @staticmethod + def _from_pb(metrics_pb): + dict_repr = MessageToDict(metrics_pb._pb, preserving_proto_field_name=True) + plan_summary = PlanSummary( + indexes_used=dict_repr.get("plan_summary", {}).get("indexes_used", []) + ) + if "execution_stats" in dict_repr: + stats_dict = dict_repr.get("execution_stats", {}) + execution_stats = ExecutionStats( + results_returned=int(stats_dict.get("results_returned", 0)), + execution_duration=metrics_pb.execution_stats.execution_duration, + read_operations=int(stats_dict.get("read_operations", 0)), + debug_stats=stats_dict.get("debug_stats", {}), + ) + return _ExplainAnalyzeMetrics( + plan_summary=plan_summary, _execution_stats=execution_stats + ) + else: + return ExplainMetrics(plan_summary=plan_summary) + + @property + def execution_stats(self) -> ExecutionStats: + raise QueryExplainError( + "execution_stats not available when explain_options.analyze=False." 
+        )
+
+
+@dataclass(frozen=True)
+class _ExplainAnalyzeMetrics(ExplainMetrics):
+    """
+    Subclass of ExplainMetrics that includes execution_stats.
+    Only available when explain_options.analyze is True.
+    """
+
+    plan_summary: PlanSummary
+    _execution_stats: ExecutionStats
+
+    @property
+    def execution_stats(self) -> ExecutionStats:
+        return self._execution_stats
+
+
+class QueryExplainError(Exception):
+    """
+    Error returned when there is a problem accessing query profiling information.
+    """
+
+    pass
diff --git a/packages/google-cloud-datastore/samples/snippets/snippets.py b/packages/google-cloud-datastore/samples/snippets/snippets.py
index 749a1ce4d74d..1b86ba8b0cdf 100644
--- a/packages/google-cloud-datastore/samples/snippets/snippets.py
+++ b/packages/google-cloud-datastore/samples/snippets/snippets.py
@@ -278,15 +278,16 @@ def sum_query_property_filter(client):
     # Execute sum aggregation query with filters
     completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
     completed_tasks_query = client.aggregation_query(query=completed_tasks).sum(
-        property_ref="hours",
-        alias="total_completed_sum_hours"
+        property_ref="hours", alias="total_completed_sum_hours"
     )
 
     completed_query_result = completed_tasks_query.fetch()
     for aggregation_results in completed_query_result:
         for aggregation_result in aggregation_results:
             if aggregation_result.alias == "total_completed_sum_hours":
-                print(f"Total sum of hours in completed tasks is {aggregation_result.value}")
+                print(
+                    f"Total sum of hours in completed tasks is {aggregation_result.value}"
+                )
     # [END datastore_sum_aggregation_query_with_filters]
 
     return tasks
@@ -339,15 +340,16 @@ def avg_query_property_filter(client):
     # Execute average aggregation query with filters
     completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
     completed_tasks_query = client.aggregation_query(query=completed_tasks).avg(
-        property_ref="hours",
-        alias="total_completed_avg_hours"
+        property_ref="hours", alias="total_completed_avg_hours"
     )
 
     completed_query_result = completed_tasks_query.fetch()
     for aggregation_results in completed_query_result:
         for aggregation_result in aggregation_results:
             if aggregation_result.alias == "total_completed_avg_hours":
-                print(f"Total average of hours in completed tasks is {aggregation_result.value}")
+                print(
+                    f"Total average of hours in completed tasks is {aggregation_result.value}"
+                )
     # [END datastore_avg_aggregation_query_with_filters]
 
     return tasks
@@ -375,9 +377,11 @@ def multiple_aggregations_query(client):
         [
             datastore.aggregation.CountAggregation(alias="count_aggregation"),
             datastore.aggregation.SumAggregation(
-                property_ref="hours", alias="sum_aggregation"),
+                property_ref="hours", alias="sum_aggregation"
+            ),
             datastore.aggregation.AvgAggregation(
-                property_ref="hours", alias="avg_aggregation")
+                property_ref="hours", alias="avg_aggregation"
+            ),
         ]
     )
@@ -389,6 +393,96 @@ def multiple_aggregations_query(client):
     return tasks
 
 
+def explain_analyze_entity(client):
+    # [START datastore_query_explain_analyze_entity]
+    # Build the query with explain_options
+    # analyze = true to get back the query stats, plan info, and query results
+    query = client.query(
+        kind="Task", explain_options=datastore.ExplainOptions(analyze=True)
+    )
+
+    # initiate the query
+    iterator = query.fetch()
+
+    # explain_metrics is only available after query is completed
+    for task_result in iterator:
+        print(task_result)
+
+    # get the plan summary
+    plan_summary = iterator.explain_metrics.plan_summary
+    print(f"Indexes used: 
{plan_summary.indexes_used}")
+
+    # get the execution stats
+    execution_stats = iterator.explain_metrics.execution_stats
+    print(f"Results returned: {execution_stats.results_returned}")
+    print(f"Execution duration: {execution_stats.execution_duration}")
+    print(f"Read operations: {execution_stats.read_operations}")
+    print(f"Debug stats: {execution_stats.debug_stats}")
+    # [END datastore_query_explain_analyze_entity]
+
+
+def explain_entity(client):
+    # [START datastore_query_explain_entity]
+    # Build the query with explain_options
+    # by default (analyze = false), only plan_summary property is available
+    query = client.query(kind="Task", explain_options=datastore.ExplainOptions())
+
+    # initiate the query
+    iterator = query.fetch()
+
+    # get the plan summary
+    plan_summary = iterator.explain_metrics.plan_summary
+    print(f"Indexes used: {plan_summary.indexes_used}")
+    # [END datastore_query_explain_entity]
+
+
+def explain_analyze_aggregation(client):
+    # [START datastore_query_explain_analyze_aggregation]
+    # Build the aggregation query with explain_options
+    # analyze = true to get back the query stats, plan info, and query results
+    all_tasks_query = client.query(kind="Task")
+    count_query = client.aggregation_query(
+        all_tasks_query, explain_options=datastore.ExplainOptions(analyze=True)
+    ).count()
+
+    # initiate the query
+    iterator = count_query.fetch()
+
+    # explain_metrics is only available after query is completed
+    for task_result in iterator:
+        print(task_result)
+
+    # get the plan summary
+    plan_summary = iterator.explain_metrics.plan_summary
+    print(f"Indexes used: {plan_summary.indexes_used}")
+
+    # get the execution stats
+    execution_stats = iterator.explain_metrics.execution_stats
+    print(f"Results returned: {execution_stats.results_returned}")
+    print(f"Execution duration: {execution_stats.execution_duration}")
+    print(f"Read operations: {execution_stats.read_operations}")
+    print(f"Debug stats: {execution_stats.debug_stats}")
+    # [END datastore_query_explain_analyze_aggregation]
+
+
+def explain_aggregation(client):
+    # [START datastore_query_explain_aggregation]
+    # Build the aggregation query with explain_options
+    # by default (analyze = false), only plan_summary property is available
+    all_tasks_query = client.query(kind="Task")
+    count_query = client.aggregation_query(
+        all_tasks_query, explain_options=datastore.ExplainOptions()
+    ).count()
+
+    # initiate the query
+    iterator = count_query.fetch()
+
+    # get the plan summary
+    plan_summary = iterator.explain_metrics.plan_summary
+    print(f"Indexes used: {plan_summary.indexes_used}")
+    # [END datastore_query_explain_aggregation]
+
+
 def main(project_id):
     client = datastore.Client(project_id)
 
diff --git a/packages/google-cloud-datastore/samples/snippets/snippets_test.py b/packages/google-cloud-datastore/samples/snippets/snippets_test.py
index 92db05075d9c..ae3b2948b349 100644
--- a/packages/google-cloud-datastore/samples/snippets/snippets_test.py
+++ b/packages/google-cloud-datastore/samples/snippets/snippets_test.py
@@ -52,17 +52,15 @@ def setup_indexes(request):
     indexes = []
 
     done_property_index = datastore_admin_v1.Index.IndexedProperty(
-        name='done',
-        direction=datastore_admin_v1.Index.Direction.ASCENDING
+        name="done", direction=datastore_admin_v1.Index.Direction.ASCENDING
    )
    hour_property_index = datastore_admin_v1.Index.IndexedProperty(
-        name='hours',
-        direction=datastore_admin_v1.Index.Direction.ASCENDING
+        name="hours", direction=datastore_admin_v1.Index.Direction.ASCENDING
    )
    done_hour_index = 
datastore_admin_v1.Index( - kind='Task', + kind="Task", ancestor=datastore_admin_v1.Index.AncestorMode.NONE, - properties=[done_property_index, hour_property_index] + properties=[done_property_index, hour_property_index], ) indexes.append(done_hour_index) @@ -157,9 +155,7 @@ def test_count_query_with_stale_read(self, capsys, client): def test_sum_query_on_kind(self, capsys, client): tasks = snippets.sum_query_on_kind(client) captured = capsys.readouterr() - assert ( - captured.out.strip() == "Total sum of hours in tasks is 9" - ) + assert captured.out.strip() == "Total sum of hours in tasks is 9" assert captured.err == "" client.entities_to_delete.extend(tasks) @@ -168,9 +164,7 @@ def test_sum_query_on_kind(self, capsys, client): def test_sum_query_property_filter(self, capsys, client): tasks = snippets.sum_query_property_filter(client) captured = capsys.readouterr() - assert ( - captured.out.strip() == "Total sum of hours in completed tasks is 8" - ) + assert captured.out.strip() == "Total sum of hours in completed tasks is 8" assert captured.err == "" client.entities_to_delete.extend(tasks) @@ -179,9 +173,7 @@ def test_sum_query_property_filter(self, capsys, client): def test_avg_query_on_kind(self, capsys, client): tasks = snippets.avg_query_on_kind(client) captured = capsys.readouterr() - assert ( - captured.out.strip() == "Total average of hours in tasks is 3.0" - ) + assert captured.out.strip() == "Total average of hours in tasks is 3.0" assert captured.err == "" client.entities_to_delete.extend(tasks) @@ -201,15 +193,57 @@ def test_avg_query_property_filter(self, capsys, client): def test_multiple_aggregations_query(self, capsys, client): tasks = snippets.multiple_aggregations_query(client) captured = capsys.readouterr() + assert "avg_aggregation value is 3.0" in captured.out + assert "count_aggregation value is 3" in captured.out + assert "sum_aggregation value is 9" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_analyze_entity(self, capsys, client): + snippets.explain_analyze_entity(client) + captured = capsys.readouterr() assert ( - 'avg_aggregation value is 3.0' in captured.out + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out ) + assert "Results returned: 0" in captured.out + assert "Execution duration: 0:00" in captured.out + assert "Read operations: 0" in captured.out + assert "Debug stats: {" in captured.out + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_entity(self, capsys, client): + snippets.explain_entity(client) + captured = capsys.readouterr() assert ( - 'count_aggregation value is 3' in captured.out + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out ) + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_analyze_aggregation(self, capsys, client): + snippets.explain_analyze_aggregation(client) + captured = capsys.readouterr() assert ( - 'sum_aggregation value is 9' in captured.out + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out ) + assert "Results returned: 1" in captured.out + assert "Execution duration: 0:00" in captured.out + assert "Read operations: 1" in captured.out + assert "Debug stats: {" in captured.out assert captured.err == "" - 
client.entities_to_delete.extend(tasks) + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_aggregation(self, capsys, client): + snippets.explain_aggregation(client) + captured = capsys.readouterr() + assert ( + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out + ) + assert captured.err == "" diff --git a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py index ae175d808b02..14588fde24e2 100644 --- a/packages/google-cloud-datastore/tests/system/test_aggregation_query.py +++ b/packages/google-cloud-datastore/tests/system/test_aggregation_query.py @@ -530,3 +530,144 @@ def test_aggregation_query_with_nested_query_multiple_filters( ) assert result_dict["sum_appearances"].value == expected_sum assert result_dict["avg_appearances"].value == expected_sum / expected_matches + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_no_explain( + aggregation_query_client, nested_query, database_id +): + """ + When explain_options is not set, iterator.explain_metrics should raise an exception + """ + from google.cloud.datastore.query_profile import QueryExplainError + + expected_error = "explain_options not set on query" + + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=None + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator and try again + list(iterator) + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_explain(aggregation_query_client, nested_query, database_id): + """ + When explain_options(analyze=False) is set, iterator should contain explain_metrics field + with plan_summary but no execution_stats + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import PlanSummary + + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=ExplainOptions(analyze=False) + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + # should have plan_summary but no execution_stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + # execution_stats should not be present + with pytest.raises(QueryExplainError) as excinfo: + stats.execution_stats + assert "execution_stats not available" in str(excinfo.value) + # should have no results + assert len(list(iterator)) == 0 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_explain_analyze( + aggregation_query_client, nested_query, database_id +): + """ + When explain_options(analyze=True) is set, iterator should contain explain_metrics field + with plan_summary and execution_stats + + Should not be 
present until iterator is exhausted + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExecutionStats + from google.cloud.datastore.query_profile import PlanSummary + + expected_error = "explain_metrics not available until query is complete." + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=ExplainOptions(analyze=True) + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + # explain_metrics isn't present until iterator is exhausted + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator + results = list(iterator) + num_results = len(results) + assert num_results > 0 + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + # verify plan_summary + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + assert ( + stats.plan_summary.indexes_used[0]["properties"] + == "(appearances ASC, __name__ ASC)" + ) + assert stats.plan_summary.indexes_used[0]["query_scope"] == "Includes ancestors" + # verify execution_stats + assert isinstance(stats.execution_stats, ExecutionStats) + assert stats.execution_stats.results_returned == num_results + assert stats.execution_stats.read_operations == num_results + duration = stats.execution_stats.execution_duration.total_seconds() + assert duration > 0 + assert duration < 1 # we expect a number closer to 0.05 + assert isinstance(stats.execution_stats.debug_stats, dict) + assert "billing_details" in stats.execution_stats.debug_stats + assert "documents_scanned" in stats.execution_stats.debug_stats + assert "index_entries_scanned" in stats.execution_stats.debug_stats + assert len(stats.execution_stats.debug_stats) > 0 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_aggregation_query_explain_in_transaction( + aggregation_query_client, nested_query, database_id +): + """ + When an aggregation query is run in a transaction, the transaction id should be sent with the request. + The result is the same as when it is run outside of a transaction. 
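+
+    A rough sketch of the client-facing pattern (fixture names assumed from
+    this module; illustrative only):
+
+        with aggregation_query_client.transaction():
+            iterator = agg_query.fetch()
+            list(iterator)                    # exhaust results inside the txn
+            stats = iterator.explain_metrics  # same metrics as outside a txn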
+ """ + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExplainOptions + + with aggregation_query_client.transaction(): + agg_query = aggregation_query_client.aggregation_query( + nested_query, explain_options=ExplainOptions(analyze=True) + ) + agg_query.count() + agg_query.sum("appearances") + agg_query.avg("appearances") + iterator = agg_query.fetch() + # run full query + list(iterator) + # check for stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) diff --git a/packages/google-cloud-datastore/tests/system/test_query.py b/packages/google-cloud-datastore/tests/system/test_query.py index b9574789a8f2..99dce2ec22fa 100644 --- a/packages/google-cloud-datastore/tests/system/test_query.py +++ b/packages/google-cloud-datastore/tests/system/test_query.py @@ -61,8 +61,8 @@ def ancestor_key(query_client, in_emulator): clear_datastore.remove_all_entities(client=query_client) -def _make_ancestor_query(query_client, ancestor_key): - return query_client.query(kind="Character", ancestor=ancestor_key) +def _make_ancestor_query(query_client, ancestor_key, **kwargs): + return query_client.query(kind="Character", ancestor=ancestor_key, **kwargs) @pytest.fixture(scope="function") @@ -527,3 +527,123 @@ def test_query_add_complex_filters(ancestor_query, database_id): assert alive_count == 4 assert appearance_count == 4 assert stark_family_count == 5 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_no_explain(query_client, ancestor_key, database_id): + """ + When explain_options is not set, iterator.explain_metrics should raise an exception + """ + from google.cloud.datastore.query_profile import QueryExplainError + + expected_error = "explain_options not set on query" + query = _make_ancestor_query(query_client, ancestor_key, explain_options=None) + iterator = query.fetch() + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator and try again + list(iterator) + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_explain(query_client, ancestor_key, database_id): + """ + When explain_options(analyze=False) is set, iterator should contain explain_metrics field + with plan_summary but no execution_stats + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import PlanSummary + + query = _make_ancestor_query( + query_client, ancestor_key, explain_options=ExplainOptions(analyze=False) + ) + iterator = query.fetch() + # should have plan_summary but no execution_stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + assert stats.plan_summary.indexes_used[0]["properties"] == "(__name__ ASC)" + assert stats.plan_summary.indexes_used[0]["query_scope"] == "Collection group" + # execution_stats should not be present + with pytest.raises(QueryExplainError) as excinfo: + stats.execution_stats + expected_error = "execution_stats not available when 
explain_options.analyze=False." + assert expected_error in str(excinfo.value) + # should have no results + assert list(iterator) == [] + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_explain_analyze(query_client, ancestor_key, database_id): + """ + When explain_options(analyze=True) is set, iterator should contain explain_metrics field + with plan_summary and execution_stats + + Should not be present until iterator is exhausted + """ + from google.cloud.datastore.query_profile import QueryExplainError + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExecutionStats + from google.cloud.datastore.query_profile import PlanSummary + + expected_error = "explain_metrics not available until query is complete." + query = _make_ancestor_query( + query_client, ancestor_key, explain_options=ExplainOptions(analyze=True) + ) + iterator = query.fetch() + # explain_metrics isn't present until iterator is exhausted + with pytest.raises(QueryExplainError) as excinfo: + iterator.explain_metrics + assert expected_error in str(excinfo.value) + # exhaust the iterator + results = list(iterator) + num_results = len(results) + assert num_results > 0 + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) + # verify plan_summary + assert isinstance(stats.plan_summary, PlanSummary) + assert len(stats.plan_summary.indexes_used) > 0 + assert stats.plan_summary.indexes_used[0]["properties"] == "(__name__ ASC)" + assert stats.plan_summary.indexes_used[0]["query_scope"] == "Collection group" + # verify execution_stats + assert isinstance(stats.execution_stats, ExecutionStats) + assert stats.execution_stats.results_returned == num_results + assert stats.execution_stats.read_operations == num_results + duration = stats.execution_stats.execution_duration.total_seconds() + assert duration > 0 + assert duration < 1 # we expect a number closer to 0.05 + assert isinstance(stats.execution_stats.debug_stats, dict) + assert "billing_details" in stats.execution_stats.debug_stats + assert "documents_scanned" in stats.execution_stats.debug_stats + assert "index_entries_scanned" in stats.execution_stats.debug_stats + assert len(stats.execution_stats.debug_stats) > 0 + + +@pytest.mark.parametrize("database_id", [None, _helpers.TEST_DATABASE], indirect=True) +def test_query_explain_in_transaction(query_client, ancestor_key, database_id): + """ + Should be able to access explain metrics when called in a transaction + """ + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore.query_profile import ExplainOptions + + query = _make_ancestor_query( + query_client, ancestor_key, explain_options=ExplainOptions(analyze=True) + ) + client = query._client + with client.transaction(): + # run full query + iterator = query.fetch() + list(iterator) + # check for stats + stats = iterator.explain_metrics + assert isinstance(stats, ExplainMetrics) diff --git a/packages/google-cloud-datastore/tests/unit/test_aggregation.py b/packages/google-cloud-datastore/tests/unit/test_aggregation.py index 8284b8086ccb..5a59522060b3 100644 --- a/packages/google-cloud-datastore/tests/unit/test_aggregation.py +++ b/packages/google-cloud-datastore/tests/unit/test_aggregation.py @@ -278,10 +278,25 @@ def test_query_fetch_w_explicit_client_w_limit(client, database_id): assert iterator._limit == limit 
+@pytest.mark.parametrize("database_id", [None, "somedb"], indirect=True) +def test_aggregation_uses_nested_query_explain_options(client, database_id): + """ + If explain_options is set on the nested query but not the aggregation, + use the nested query's explain_options. + """ + expected_explain_options = mock.Mock() + query = _make_query(client, explain_options=expected_explain_options) + aggregation_query = _make_aggregation_query( + client=client, query=query, explain_options=None + ) + assert aggregation_query._explain_options is expected_explain_options + + def test_iterator_constructor_defaults(): - query = object() + query = mock.Mock() client = object() aggregation_query = AggregationQuery(client=client, query=query) + assert aggregation_query._explain_options == query._explain_options iterator = _make_aggregation_iterator(aggregation_query, client) assert not iterator._started @@ -292,12 +307,17 @@ def test_iterator_constructor_defaults(): assert iterator._more_results assert iterator._retry is None assert iterator._timeout is None + assert iterator._explain_metrics is None def test_iterator_constructor_explicit(): query = object() client = object() - aggregation_query = AggregationQuery(client=client, query=query) + explain_options = object() + aggregation_query = AggregationQuery( + client=client, query=query, explain_options=explain_options + ) + assert aggregation_query._explain_options is explain_options retry = mock.Mock() timeout = 100000 limit = 2 @@ -315,6 +335,7 @@ def test_iterator_constructor_explicit(): assert iterator._retry == retry assert iterator._timeout == timeout assert iterator._limit == limit + assert iterator._explain_metrics is None def test_iterator__build_protobuf_empty(): @@ -404,16 +425,13 @@ def test_iterator__process_query_results_finished_result(): def test_iterator__process_query_results_unexpected_result(): - from google.cloud.datastore_v1.types import query as query_pb2 from google.cloud.datastore.aggregation import AggregationResult iterator = _make_aggregation_iterator(None, None) aggregation_pbs = [AggregationResult(alias="total", value=1)] - more_results_enum = ( - query_pb2.QueryResultBatch.MoreResultsType.MORE_RESULTS_TYPE_UNSPECIFIED - ) + more_results_enum = 999 response_pb = _make_aggregation_query_response(aggregation_pbs, more_results_enum) with pytest.raises(ValueError): iterator._process_query_results(response_pb) @@ -451,6 +469,169 @@ def test_iterator__next_page_no_more(): ds_api.run_aggregation_query.assert_not_called() +@pytest.mark.parametrize("database_id", [None, "somedb"]) +@pytest.mark.parametrize("analyze", [True, False]) +def test_iterator_sends_explain_options_w_request(database_id, analyze): + """ + When query has explain_options set, all requests should include + the explain_options field. 
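+
+    Sketch of the expected request payload (shape assumed from
+    ExplainOptions._to_dict; illustrative only):
+
+        {
+            "project_id": ...,
+            "partition_id": ...,
+            "read_options": ...,
+            "aggregation_query": ...,
+            "explain_options": {"analyze": analyze},
+        }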
+    """
+    from google.cloud.datastore.query_profile import ExplainOptions
+
+    response_pb = _make_aggregation_query_response([], 0)
+    ds_api = _make_datastore_api_for_aggregation(response_pb)
+    client = _Client(None, datastore_api=ds_api)
+    explain_options = ExplainOptions(analyze=analyze)
+    query = _make_aggregation_query(
+        client, _make_query(client), explain_options=explain_options
+    )
+    iterator = _make_aggregation_iterator(query, client)
+    iterator._next_page()
+    # ensure explain_options is set in request
+    assert ds_api.run_aggregation_query.call_count == 1
+    found_explain_options = ds_api.run_aggregation_query.call_args[1]["request"][
+        "explain_options"
+    ]
+    assert found_explain_options == explain_options._to_dict()
+    assert found_explain_options["analyze"] == analyze
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_iterator_explain_metrics(database_id):
+    """
+    If explain_metrics is received from the backend, it should be set on the iterator
+    """
+    from google.cloud.datastore.query_profile import ExplainMetrics
+    from google.cloud.datastore_v1.types import query_profile as query_profile_pb2
+    from google.protobuf import duration_pb2
+
+    expected_metrics = query_profile_pb2.ExplainMetrics(
+        plan_summary=query_profile_pb2.PlanSummary(),
+        execution_stats=query_profile_pb2.ExecutionStats(
+            results_returned=100,
+            execution_duration=duration_pb2.Duration(seconds=1),
+            read_operations=10,
+            debug_stats={},
+        ),
+    )
+    response_pb = _make_aggregation_query_response([], 0)
+    response_pb.explain_metrics = expected_metrics
+    ds_api = _make_datastore_api_for_aggregation(response_pb)
+    client = _Client(None, datastore_api=ds_api)
+    query = _make_aggregation_query(client=client, query=_make_query(client))
+    iterator = _make_aggregation_iterator(query, client)
+    assert iterator._explain_metrics is None
+    iterator._next_page()
+    assert isinstance(iterator._explain_metrics, ExplainMetrics)
+    assert iterator._explain_metrics == ExplainMetrics._from_pb(expected_metrics)
+    assert iterator.explain_metrics == ExplainMetrics._from_pb(expected_metrics)
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_iterator_explain_metrics_no_explain(database_id):
+    """
+    If query has no explain_options set, iterator.explain_metrics should raise
+    an exception.
+    """
+    from google.cloud.datastore.query_profile import QueryExplainError
+
+    ds_api = _make_datastore_api_for_aggregation()
+    client = _Client(None, datastore_api=ds_api)
+    query = _make_aggregation_query(client, _make_query(client), explain_options=None)
+    iterator = _make_aggregation_iterator(query, client)
+    assert iterator._explain_metrics is None
+    with pytest.raises(QueryExplainError) as exc:
+        iterator.explain_metrics
+    assert "explain_options not set on query" in str(exc.value)
+    # should not raise error if field is set
+    iterator._explain_metrics = object()
+    assert iterator.explain_metrics is iterator._explain_metrics
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_iterator_explain_metrics_no_analyze_make_call(database_id):
+    """
+    If query.explain_options(analyze=False), accessing iterator.explain_metrics
+    should make a network call to get the data.
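+
+    Sketch of the lazy behavior under test (illustrative only):
+
+        iterator = agg_query.fetch()  # no RPC issued yet
+        iterator.explain_metrics      # issues one run_aggregation_query call,
+                                      # then returns the captured metrics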
+ """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import duration_pb2 + + response_pb = _make_aggregation_query_response([], 0) + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary(), + execution_stats=query_profile_pb2.ExecutionStats( + results_returned=100, + execution_duration=duration_pb2.Duration(seconds=1), + read_operations=10, + debug_stats={}, + ), + ) + response_pb.explain_metrics = expected_metrics + ds_api = _make_datastore_api_for_aggregation(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = _make_aggregation_query( + client, _make_query(client), explain_options=explain_options + ) + iterator = _make_aggregation_iterator(query, client) + assert ds_api.run_aggregation_query.call_count == 0 + metrics = iterator.explain_metrics + # ensure explain_options is set in request + assert ds_api.run_aggregation_query.call_count == 1 + assert isinstance(metrics, ExplainMetrics) + assert metrics == ExplainMetrics._from_pb(expected_metrics) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call_failed(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. + If the call does not result in explain_metrics data, it should raise a QueryExplainError. + """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + # mocked response does not return explain_metrics + response_pb = _make_aggregation_query_response([], 0) + ds_api = _make_datastore_api_for_aggregation(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = _make_aggregation_query( + client, _make_query(client), explain_options=explain_options + ) + iterator = _make_aggregation_iterator(query, client) + assert ds_api.run_aggregation_query.call_count == 0 + with pytest.raises(QueryExplainError): + iterator.explain_metrics + assert ds_api.run_aggregation_query.call_count == 1 + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_analyze_access_before_complete(database_id): + """ + If query.explain_options(analyze=True), accessing iterator.explain_metrics + before the query is complete should raise an exception. 
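+
+    Sketch of the expected lifecycle (illustrative only):
+
+        iterator = agg_query.fetch()
+        iterator.explain_metrics  # raises QueryExplainError: not complete
+        list(iterator)            # exhaust the iterator
+        iterator.explain_metrics  # now returns ExplainMetrics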
+    """
+    from google.cloud.datastore.query_profile import ExplainOptions
+    from google.cloud.datastore.query_profile import QueryExplainError
+
+    ds_api = _make_datastore_api_for_aggregation()
+    client = _Client(None, datastore_api=ds_api)
+    explain_options = ExplainOptions(analyze=True)
+    query = _make_aggregation_query(
+        client, _make_query(client), explain_options=explain_options
+    )
+    iterator = _make_aggregation_iterator(query, client)
+    expected_error = "explain_metrics not available until query is complete"
+    with pytest.raises(QueryExplainError) as exc:
+        iterator.explain_metrics
+    assert expected_error in str(exc.value)
+
+
 def _next_page_helper(txn_id=None, retry=None, timeout=None, database_id=None):
     from google.api_core import page_iterator
     from google.cloud.datastore_v1.types import datastore as datastore_pb2
diff --git a/packages/google-cloud-datastore/tests/unit/test_query.py b/packages/google-cloud-datastore/tests/unit/test_query.py
index fa7d63dce8e4..75fa31fad9ec 100644
--- a/packages/google-cloud-datastore/tests/unit/test_query.py
+++ b/packages/google-cloud-datastore/tests/unit/test_query.py
@@ -1072,6 +1072,159 @@ def test_iterator__next_page_w_skipped_lt_offset(skipped_cursor_1, database_id):
     assert ds_api.run_query.call_args_list == expected_calls
 
 
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+@pytest.mark.parametrize("analyze", [True, False])
+def test_iterator_sends_explain_options_w_request(database_id, analyze):
+    """
+    When query has explain_options set, all requests should include
+    the explain_options field.
+    """
+    from google.cloud.datastore.query_profile import ExplainOptions
+
+    response_pb = _make_query_response([], b"", 0, 0)
+    ds_api = _make_datastore_api(response_pb)
+    client = _Client(None, datastore_api=ds_api)
+    explain_options = ExplainOptions(analyze=analyze)
+    query = Query(client, explain_options=explain_options)
+    iterator = _make_iterator(query, client)
+    iterator._next_page()
+    # ensure explain_options is set in request
+    assert ds_api.run_query.call_count == 1
+    found_explain_options = ds_api.run_query.call_args[1]["request"]["explain_options"]
+    assert found_explain_options == explain_options._to_dict()
+    assert found_explain_options["analyze"] == analyze
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def test_iterator_explain_metrics(database_id):
+    """
+    If explain_metrics is received from the backend, it should be set on the iterator
+    """
+    from google.cloud.datastore.query_profile import ExplainMetrics
+    from google.cloud.datastore_v1.types import query_profile as query_profile_pb2
+    from google.protobuf import duration_pb2
+
+    expected_metrics = query_profile_pb2.ExplainMetrics(
+        plan_summary=query_profile_pb2.PlanSummary(),
+        execution_stats=query_profile_pb2.ExecutionStats(
+            results_returned=100,
+            execution_duration=duration_pb2.Duration(seconds=1),
+            read_operations=10,
+            debug_stats={},
+        ),
+    )
+    response_pb = _make_query_response([], b"", 0, 0)
+    response_pb.explain_metrics = expected_metrics
+    ds_api = _make_datastore_api(response_pb)
+    client = _Client(None, datastore_api=ds_api)
+    query = Query(client)
+    iterator = _make_iterator(query, client)
+    assert iterator._explain_metrics is None
+    iterator._next_page()
+    assert isinstance(iterator._explain_metrics, ExplainMetrics)
+    assert iterator._explain_metrics == ExplainMetrics._from_pb(expected_metrics)
+    assert iterator.explain_metrics == ExplainMetrics._from_pb(expected_metrics)
+
+
+@pytest.mark.parametrize("database_id", [None, "somedb"])
+def 
test_iterator_explain_metrics_no_explain(database_id): + """ + If query has no explain_options set, iterator.explain_metrics should raise + an exception. + """ + from google.cloud.datastore.query_profile import QueryExplainError + + ds_api = _make_datastore_api() + client = _Client(None, datastore_api=ds_api) + query = Query(client, explain_options=None) + iterator = _make_iterator(query, client) + assert iterator._explain_metrics is None + with pytest.raises(QueryExplainError) as exc: + iterator.explain_metrics + assert "explain_options not set on query" in str(exc.value) + # should not raise error if field is set + expected_metrics = object() + iterator._explain_metrics = expected_metrics + assert iterator.explain_metrics is expected_metrics + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. + """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import ExplainMetrics + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import duration_pb2 + + response_pb = _make_query_response([], b"", 0, 0) + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary(), + execution_stats=query_profile_pb2.ExecutionStats( + results_returned=100, + execution_duration=duration_pb2.Duration(seconds=1), + read_operations=10, + debug_stats={}, + ), + ) + response_pb.explain_metrics = expected_metrics + ds_api = _make_datastore_api(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = Query(client, explain_options=explain_options) + iterator = _make_iterator(query, client) + assert ds_api.run_query.call_count == 0 + metrics = iterator.explain_metrics + # ensure explain_options is set in request + assert ds_api.run_query.call_count == 1 + assert isinstance(metrics, ExplainMetrics) + assert metrics == ExplainMetrics._from_pb(expected_metrics) + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_metrics_no_analyze_make_call_failed(database_id): + """ + If query.explain_options(analyze=False), accessing iterator.explain_metrics + should make a network call to get the data. + If the call does not result in explain_metrics data, it should raise a QueryExplainError. + """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + # mocked response does not return explain_metrics + response_pb = _make_query_response([], b"", 0, 0) + ds_api = _make_datastore_api(response_pb) + client = _Client(None, datastore_api=ds_api) + explain_options = ExplainOptions(analyze=False) + query = Query(client, explain_options=explain_options) + iterator = _make_iterator(query, client) + assert ds_api.run_query.call_count == 0 + with pytest.raises(QueryExplainError): + iterator.explain_metrics + assert ds_api.run_query.call_count == 1 + + +@pytest.mark.parametrize("database_id", [None, "somedb"]) +def test_iterator_explain_analyze_access_before_complete(database_id): + """ + If query.explain_options(analyze=True), accessing iterator.explain_metrics + before the query is complete should raise an exception. 
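+
+    Illustrative sketch (mirrors the aggregation variant of this test):
+
+        iterator = query.fetch()
+        iterator.explain_metrics  # QueryExplainError until results are consumed
+        list(iterator)
+        iterator.explain_metrics  # ExplainMetrics with plan and execution stats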
+ """ + from google.cloud.datastore.query_profile import ExplainOptions + from google.cloud.datastore.query_profile import QueryExplainError + + ds_api = _make_datastore_api() + client = _Client(None, datastore_api=ds_api) + query = _make_query(client, explain_options=ExplainOptions(analyze=True)) + iterator = _make_iterator(query, client) + expected_error = "explain_metrics not available until query is complete" + with pytest.raises(QueryExplainError) as exc: + iterator.explain_metrics + assert expected_error in str(exc.value) + + def test__item_to_entity(): from google.cloud.datastore.query import _item_to_entity diff --git a/packages/google-cloud-datastore/tests/unit/test_query_profile.py b/packages/google-cloud-datastore/tests/unit/test_query_profile.py new file mode 100644 index 000000000000..d8c8fdd87d88 --- /dev/null +++ b/packages/google-cloud-datastore/tests/unit/test_query_profile.py @@ -0,0 +1,126 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + + +def test_explain_metrics__from_pb(): + """ + Test creating an instance of ExplainMetrics from a protobuf. + """ + from google.cloud.datastore.query_profile import ( + ExplainMetrics, + _ExplainAnalyzeMetrics, + QueryExplainError, + PlanSummary, + ) + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import struct_pb2, duration_pb2 + + # test without execution_stats field + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary( + indexes_used=struct_pb2.ListValue(values=[]) + ) + ) + metrics = ExplainMetrics._from_pb(expected_metrics) + assert isinstance(metrics, ExplainMetrics) + assert isinstance(metrics.plan_summary, PlanSummary) + assert metrics.plan_summary.indexes_used == [] + with pytest.raises(QueryExplainError) as exc: + metrics.execution_stats + assert "execution_stats not available when explain_options.analyze=False" in str( + exc.value + ) + # test with execution_stats field + expected_metrics.execution_stats = query_profile_pb2.ExecutionStats( + results_returned=1, + execution_duration=duration_pb2.Duration(seconds=2), + read_operations=3, + debug_stats=struct_pb2.Struct( + fields={"foo": struct_pb2.Value(string_value="bar")} + ), + ) + metrics = ExplainMetrics._from_pb(expected_metrics) + assert isinstance(metrics, ExplainMetrics) + assert isinstance(metrics, _ExplainAnalyzeMetrics) + assert metrics.execution_stats.results_returned == 1 + assert metrics.execution_stats.execution_duration.total_seconds() == 2 + assert metrics.execution_stats.read_operations == 3 + assert metrics.execution_stats.debug_stats == {"foo": "bar"} + + +def test_explain_metrics__from_pb_empty(): + """ + Test with empty ExplainMetrics protobuf. 
+ """ + from google.cloud.datastore.query_profile import ( + ExplainMetrics, + ExecutionStats, + _ExplainAnalyzeMetrics, + PlanSummary, + ) + from google.cloud.datastore_v1.types import query_profile as query_profile_pb2 + from google.protobuf import struct_pb2 + + expected_metrics = query_profile_pb2.ExplainMetrics( + plan_summary=query_profile_pb2.PlanSummary( + indexes_used=struct_pb2.ListValue(values=[]) + ), + execution_stats=query_profile_pb2.ExecutionStats(), + ) + metrics = ExplainMetrics._from_pb(expected_metrics) + assert isinstance(metrics, ExplainMetrics) + assert isinstance(metrics, _ExplainAnalyzeMetrics) + assert isinstance(metrics.plan_summary, PlanSummary) + assert isinstance(metrics.execution_stats, ExecutionStats) + assert metrics.plan_summary.indexes_used == [] + assert metrics.execution_stats.results_returned == 0 + assert metrics.execution_stats.execution_duration.total_seconds() == 0 + assert metrics.execution_stats.read_operations == 0 + assert metrics.execution_stats.debug_stats == {} + + +def test_explain_metrics_execution_stats(): + """ + Standard ExplainMetrics class should raise exception when execution_stats is accessed. + _ExplainAnalyzeMetrics should include the field + """ + from google.cloud.datastore.query_profile import ( + ExplainMetrics, + QueryExplainError, + _ExplainAnalyzeMetrics, + ) + + metrics = ExplainMetrics(plan_summary=object()) + with pytest.raises(QueryExplainError) as exc: + metrics.execution_stats + assert "execution_stats not available when explain_options.analyze=False" in str( + exc.value + ) + expected_stats = object() + metrics = _ExplainAnalyzeMetrics( + plan_summary=object(), _execution_stats=expected_stats + ) + assert metrics.execution_stats is expected_stats + + +def test_explain_options__to_dict(): + """ + Should be able to create a dict representation of ExplainOptions + """ + from google.cloud.datastore.query_profile import ExplainOptions + + assert ExplainOptions(analyze=True)._to_dict() == {"analyze": True} + assert ExplainOptions(analyze=False)._to_dict() == {"analyze": False} From 985c8ae581f163e6268e666ee3dfaf818349d4d7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:27:01 -0700 Subject: [PATCH 582/611] chore(python): fix docs build (#556) Source-Link: https://github.com/googleapis/synthtool/commit/bef813d194de29ddf3576eda60148b6b3dcc93d9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .../.github/.OwlBot.lock.yaml | 3 ++- .../.kokoro/docker/docs/Dockerfile | 9 ++++----- .../.kokoro/publish-docs.sh | 20 +++++++++---------- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 001b1b1cabec..6d064ddb9b06 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e + digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 +# created: 2024-07-31T14:52:44.926548819Z diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile index 5205308b334d..e5410e296bd8 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index 38f083f05aa0..233205d580e9 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -21,18 +21,18 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version +python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt +python3.10 -m nox --version # build docs nox -s docs # create metadata -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -40,18 +40,18 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. 
-python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -59,4 +59,4 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" From 25282eb3c6aa64ee17d2cfc40a6e866f194e0fab Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:46:19 -0700 Subject: [PATCH 583/611] chore(main): release 2.20.0 (#531) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Daniel Sanche --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 16 ++++++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../cloud/datastore_admin/gapic_version.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 22 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index b7f666a684a7..ba3e06a78b90 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.19.0" + ".": "2.20.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 52d6dfc7cd5d..925fe2e23835 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.20.0](https://github.com/googleapis/python-datastore/compare/v2.19.0...v2.20.0) (2024-08-07) + + +### Features + +* Add new types ExplainOptions, ExplainMetrics, PlanSummary, ExecutionStats ([#521](https://github.com/googleapis/python-datastore/issues/521)) ([dfbee2d](https://github.com/googleapis/python-datastore/commit/dfbee2db800a3ca99e65a5d386ea907db1c46598)) +* Add new_transaction support ([#499](https://github.com/googleapis/python-datastore/issues/499)) ([43855dd](https://github.com/googleapis/python-datastore/commit/43855dd1762f51771bb1a3924c6a234641950fb6)) +* Implement query profiling ([#542](https://github.com/googleapis/python-datastore/issues/542)) ([1500f70](https://github.com/googleapis/python-datastore/commit/1500f7007f251256ce2923e1168439d40d41cc4d)) +* New PropertyMask field which allows partial commits, lookups, and query results 
([7fd218b](https://github.com/googleapis/python-datastore/commit/7fd218b2afc0282d8fea21992e8d10c5eec72ac7)) + + +### Bug Fixes + +* Retry and timeout values do not propagate in requests during pagination ([#555](https://github.com/googleapis/python-datastore/issues/555)) ([5e773cb](https://github.com/googleapis/python-datastore/commit/5e773cb8c766303fef53965dd100b3c4c93b98be)) +* Using end_cursor instead of skipped_cursor in Iterator to fix rare bug. ([#552](https://github.com/googleapis/python-datastore/issues/552)) ([4982f9a](https://github.com/googleapis/python-datastore/commit/4982f9a6cbbe2de449535295a363a2dd49538c86)) + ## [2.19.0](https://github.com/googleapis/python-datastore/compare/v2.18.0...v2.19.0) (2023-12-10) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 28762874367c..0dca6536cdbf 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 2605c08a3adf..9fea4fece7f8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.19.0" +__version__ = "2.20.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 0f1a446f3802..551f0d2ebacb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 8dc121fd5787..34a9e3527f8c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 8dc121fd5787..34a9e3527f8c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} From f325fcf93ee1bae3eae22b29b546cc9d7ca574a3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Aug 2024 11:43:12 -0400 Subject: [PATCH 584/611] chore: Update gapic-generator-python to v1.18.5 (#559) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.5 PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 5 +- .../services/datastore_admin/client.py | 2 +- .../services/datastore/async_client.py | 5 +- .../datastore_v1/services/datastore/client.py | 2 +- .../test_datastore_admin.py | 94 +++++++++++-------- .../unit/gapic/datastore_v1/test_datastore.py | 72 +++++++------- 6 files changed, 98 insertions(+), 82 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 80fb6bbf9e3a..bc248882ba98 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -244,9 +243,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatastoreAdminClient).get_transport_class, type(DatastoreAdminClient) - ) + get_transport_class = DatastoreAdminClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index ce21fdbd0faa..388ed08aec6d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -696,7 +696,7 @@ def __init__( transport_init: Union[ Type[DatastoreAdminTransport], Callable[..., DatastoreAdminTransport] ] = ( - type(self).get_transport_class(transport) + DatastoreAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DatastoreAdminTransport], transport) ) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index d6c347f6ee9d..2f0ba4f09766 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatastoreClient).get_transport_class, type(DatastoreClient) - ) + get_transport_class = DatastoreClient.get_transport_class def __init__( self, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 6c3cb8029228..6717a09fd2ee 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -644,7 +644,7 @@ def __init__( transport_init: Union[ Type[DatastoreTransport], Callable[..., DatastoreTransport] ] = ( - type(self).get_transport_class(transport) + DatastoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DatastoreTransport], transport) ) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 859224252e86..6501a012bee7 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1244,8 +1244,9 @@ def test_export_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_entities(request) @@ -1299,26 +1300,28 @@ async def test_export_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.export_entities - ] = mock_object + ] = mock_rpc request = {} await client.export_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1640,8 +1643,9 @@ def test_import_entities_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_entities(request) @@ -1695,26 +1699,28 @@ async def test_import_entities_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.import_entities - ] = mock_object + ] = mock_rpc request = {} await client.import_entities(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_entities(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2034,8 +2040,9 @@ def test_create_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_index(request) @@ -2089,26 +2096,28 @@ async def test_create_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_index - ] = mock_object + ] = mock_rpc request = {} await client.create_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2318,8 +2327,9 @@ def test_delete_index_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_index(request) @@ -2373,26 +2383,28 @@ async def test_delete_index_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_index - ] = mock_object + ] = mock_rpc request = {} await client.delete_index(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2670,22 +2682,23 @@ async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_index - ] = mock_object + ] = mock_rpc request = {} await client.get_index(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2964,22 +2977,23 @@ async def test_list_indexes_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_indexes - ] = mock_object + ] = mock_rpc request = {} await client.list_indexes(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_indexes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 65289ad3ba9f..aa1af5253efa 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1250,22 +1250,23 @@ async def test_lookup_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.lookup - ] = mock_object + ] = mock_rpc request = {} await client.lookup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.lookup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1646,22 +1647,23 @@ async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_query - ] = mock_object + ] = mock_rpc request = {} await client.run_query(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1923,22 +1925,23 @@ async def test_run_aggregation_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.run_aggregation_query - ] = mock_object + ] = mock_rpc request = {} await client.run_aggregation_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.run_aggregation_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2203,22 +2206,23 @@ async def test_begin_transaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.begin_transaction - ] = mock_object + ] = mock_rpc request = {} await client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.begin_transaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2557,22 +2561,23 @@ async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit - ] = mock_object + ] = mock_rpc request = {} await client.commit(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2978,22 +2983,23 @@ async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback - ] = mock_object + ] = mock_rpc request = {} await client.rollback(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3326,22 +3332,23 @@ async def test_allocate_ids_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.allocate_ids - ] = mock_object + ] = mock_rpc request = {} await client.allocate_ids(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.allocate_ids(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3694,22 +3701,23 @@ async def test_reserve_ids_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.reserve_ids - ] = mock_object + ] = mock_rpc request = {} await client.reserve_ids(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.reserve_ids(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 51307d9b290ea2cfb10c249f4a8a6f06e8b5b7dc Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 13 Aug 2024 12:47:35 -0400 Subject: [PATCH 585/611] fix: Allow protobuf 5.x; require protobuf >=3.20.2 (#560) * fix: Allow protobuf 5.x; require protobuf >=3.20.2 * update constraints --------- Co-authored-by: Daniel Sanche --- packages/google-cloud-datastore/setup.py | 2 +- packages/google-cloud-datastore/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 5f588e054fc0..eb3c2660faaa 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -36,7 +36,7 @@ "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/packages/google-cloud-datastore/testing/constraints-3.7.txt b/packages/google-cloud-datastore/testing/constraints-3.7.txt index da9c6ca856ac..b99d4dc5c1fb 100644 --- a/packages/google-cloud-datastore/testing/constraints-3.7.txt +++ b/packages/google-cloud-datastore/testing/constraints-3.7.txt @@ -10,4 +10,4 @@ google-auth==2.14.1 google-cloud-core==1.4.0 proto-plus==1.22.0 libcst==0.2.5 -protobuf==3.19.5 +protobuf==3.20.2 From b172a1b123d30e17d16254e7c98512fbd6efdd68 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 14 Aug 2024 18:56:42 +0200 Subject: [PATCH 586/611] chore(deps): update all dependencies (#519) * chore(deps): update all dependencies * pin pytest for python 3.7 --------- Co-authored-by: Daniel Sanche Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 5 +++-- .../samples/snippets/schedule-export/requirements-test.txt | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt index 80d3b1a9ebe0..81c08fe2c07d 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements-test.txt @@ -1,4 +1,5 @@ backoff===1.11.1; python_version < "3.7" backoff==2.2.1; 
python_version >= "3.7" -pytest==7.4.4 -flaky==3.7.0 +pytest===7.4.3; python_version == '3.7' +pytest==8.3.2; python_version >= '3.8' +flaky==3.8.1 diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt index fa427e190024..6c461db6f4c6 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements-test.txt @@ -1 +1,2 @@ -pytest==7.4.4 \ No newline at end of file +pytest===7.4.3; python_version == '3.7' +pytest==8.3.2; python_version >= '3.8' \ No newline at end of file From 166cbea0377bc6f0e86e00f7956768552eb8a455 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 14 Aug 2024 13:23:04 -0400 Subject: [PATCH 587/611] chore(main): release 2.20.1 (#561) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index ba3e06a78b90..a95c589d8ce9 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.20.0" + ".": "2.20.1" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 925fe2e23835..45db587dbe41 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.20.1](https://github.com/googleapis/python-datastore/compare/v2.20.0...v2.20.1) (2024-08-14) + + +### Bug Fixes + +* Allow protobuf 5.x; require protobuf >=3.20.2 ([#560](https://github.com/googleapis/python-datastore/issues/560)) ([ad50e36](https://github.com/googleapis/python-datastore/commit/ad50e3648954edf27575001be833bb5e1e598f46)) + ## [2.20.0](https://github.com/googleapis/python-datastore/compare/v2.19.0...v2.20.0) (2024-08-07) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 0dca6536cdbf..5feb6ccf27f2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 9fea4fece7f8..02ee97e2a149 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.0" +__version__ = "2.20.1" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 551f0d2ebacb..5585b0b1a0d8 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 34a9e3527f8c..9b4d43d65b8d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 34a9e3527f8c..9b4d43d65b8d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} From 4aa39f9a3a02a05b99b33bc2679fa6e0cdb97dd5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 11:38:26 -0400 Subject: [PATCH 588/611] build(python): release script update (#565) Source-Link: https://github.com/googleapis/synthtool/commit/71a72973dddbc66ea64073b53eda49f0d22e0942 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-datastore/.github/workflows/unittest.yml | 1 + packages/google-cloud-datastore/.kokoro/release.sh | 2 +- packages/google-cloud-datastore/.kokoro/release/common.cfg | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 6d064ddb9b06..597e0c3261ca 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 -# created: 2024-07-31T14:52:44.926548819Z + digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 +# created: 2024-09-16T21:04:09.091105552Z diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index f4a337c496a0..dd8bd76922f9 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -30,6 +30,7 @@ jobs: with: name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} + include-hidden-files: true cover: runs-on: ubuntu-latest diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh index b460d5a03693..d3805e022934 100755 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ b/packages/google-cloud-datastore/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") cd github/python-datastore python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg index 717dc3eee743..09d805a9c697 100644 --- a/packages/google-cloud-datastore/.kokoro/release/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From b421f98fdd75bda62ee0595b0f89ca8fdc8e9191 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 19 Sep 2024 22:02:42 +0200 Subject: [PATCH 589/611] chore(deps): update all dependencies (#563) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-datastore/samples/snippets/requirements.txt | 2 +- .../samples/snippets/schedule-export/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-datastore/samples/snippets/requirements.txt b/packages/google-cloud-datastore/samples/snippets/requirements.txt index 5bccacc5f61e..8816d485f0f5 100644 --- a/packages/google-cloud-datastore/samples/snippets/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.19.0 \ No newline at end of file +google-cloud-datastore==2.20.0 \ No newline at end of file diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt index b748abdc9c1e..7f50395730c1 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/requirements.txt @@ -1 +1 @@ -google-cloud-datastore==2.19.0 +google-cloud-datastore==2.20.0 From 4ea0c85159f3dfb8663bca983edf328e872aa088 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 15:26:59 -0700 Subject: [PATCH 590/611] chore: update gapic-generator-python (#567) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Support for field update operators in the Datastore API and resolution strategies when there is a conflict at write time PiperOrigin-RevId: 683253625 Source-Link: https://github.com/googleapis/googleapis/commit/3effbf23b1a1d1fe1306356e94397e20d01d31a0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5dd983c76417f2ad88f9d47438c48c27d5ac0e27 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWRkOTgzYzc2NDE3ZjJhZDg4ZjlkNDc0MzhjNDhjMjdkNWFjMGUyNyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.19.1 PiperOrigin-RevId: 684571179 Source-Link: 
https://github.com/googleapis/googleapis/commit/fbdc238931e0a7a95c0f55e0cd3ad9e3de2535c8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3a2cdcfb80c2d0f5ec0cc663c2bab0a9486229d0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2EyY2RjZmI4MGMyZDBmNWVjMGNjNjYzYzJiYWIwYTk0ODYyMjlkMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add FindNearest API to the stable branch PiperOrigin-RevId: 684905940 Source-Link: https://github.com/googleapis/googleapis/commit/2196d4843a22befdc7bc3b7b676eeb392c31b3cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/05df6fa16ab93bd9da17b56edd379b43963de167 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDVkZjZmYTE2YWI5M2JkOWRhMTdiNTZlZGQzNzliNDM5NjNkZTE2NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 24 +- .../services/datastore_admin/client.py | 24 +- .../datastore_admin/transports/README.rst | 9 + .../datastore_admin/transports/base.py | 20 + .../transports/grpc_asyncio.py | 45 +- .../datastore_admin/transports/rest.py | 767 ++- .../datastore_admin/transports/rest_base.py | 447 ++ .../google/cloud/datastore_v1/__init__.py | 4 + .../services/datastore/async_client.py | 176 +- .../datastore_v1/services/datastore/client.py | 24 +- .../services/datastore/transports/README.rst | 9 + .../services/datastore/transports/base.py | 20 + .../datastore/transports/grpc_asyncio.py | 49 +- .../services/datastore/transports/rest.py | 1013 +-- .../datastore/transports/rest_base.py | 650 ++ .../cloud/datastore_v1/types/__init__.py | 4 + .../cloud/datastore_v1/types/datastore.py | 225 + .../google/cloud/datastore_v1/types/query.py | 127 + .../test_datastore_admin.py | 3158 +++++----- .../unit/gapic/datastore_v1/test_datastore.py | 5472 ++++++++++------- 20 files changed, 7678 insertions(+), 4589 deletions(-) create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/README.rst create mode 100644 packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index bc248882ba98..0c2572d65b20 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1129,11 +1129,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. 
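
The recurring change in this commit's client files is one refactor: instead of wrapping an RPC with gapic_v1.method.wrap_method on every call, clients look the wrapper up in the transport's _wrapped_methods table, which _prep_wrapped_messages builds once. A schematic of that caching pattern follows; FakeTransport and its stub RPC are illustrative stand-ins, not the generated code:

from google.api_core import gapic_v1
from google.api_core import retry as retries


class FakeTransport:
    """Illustrative stand-in for a generated transport class."""

    def __init__(self):
        # Built once at construction time: one wrapper per RPC, carrying
        # its default retry/timeout policy.
        self._wrapped_methods = {
            self.list_operations: gapic_v1.method.wrap_method(
                self.list_operations,
                default_retry=retries.Retry(initial=0.1, maximum=60.0),
                default_timeout=60.0,
            ),
        }

    def list_operations(self, request=None, **kwargs):
        return []  # stub standing in for the real gRPC/REST call


transport = FakeTransport()
# What the refactored client methods now do on every call:
rpc = transport._wrapped_methods[transport.list_operations]
rpc(request={})

Bound methods hash by their instance and underlying function, so transport.list_operations is a stable dictionary key; every call reuses the cached wrapper instead of rebuilding retry and timeout policy per invocation.
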
@@ -1186,11 +1182,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1247,11 +1239,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1304,11 +1292,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 388ed08aec6d..ca54ec3c7467 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1528,11 +1528,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1585,11 +1581,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1646,11 +1638,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1703,11 +1691,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst new file mode 100644 index 000000000000..fe3ac4f18793 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DatastoreAdminTransport` is the ABC for all transports. +- public child `DatastoreAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DatastoreAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDatastoreAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DatastoreAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 8c3a00f364fe..bcfb2688f19b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -185,6 +185,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 7526fc5cda8f..366878dbc241 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -284,6 +285,9 @@ def __init__( ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) self._prep_wrapped_messages(client_info) @property @@ -527,27 +531,27 @@ def list_indexes( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.export_entities: gapic_v1.method_async.wrap_method( + self.export_entities: self._wrap_method( self.export_entities, default_timeout=60.0, client_info=client_info, ), - self.import_entities: gapic_v1.method_async.wrap_method( + self.import_entities: self._wrap_method( self.import_entities, default_timeout=60.0, client_info=client_info, ), - self.create_index: gapic_v1.method_async.wrap_method( + self.create_index: self._wrap_method( self.create_index, default_timeout=60.0, client_info=client_info, ), - self.delete_index: gapic_v1.method_async.wrap_method( + self.delete_index: self._wrap_method( self.delete_index, default_timeout=60.0, client_info=client_info, ), - self.get_index: gapic_v1.method_async.wrap_method( + self.get_index: self._wrap_method( self.get_index, default_retry=retries.AsyncRetry( initial=0.1, @@ -562,7 +566,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_indexes: gapic_v1.method_async.wrap_method( + self.list_indexes: self._wrap_method( self.list_indexes, default_retry=retries.AsyncRetry( initial=0.1, @@ -577,11 +581,40 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def delete_operation( self, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 8776e623e810..01fcdd8597fc 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -16,38 +16,34 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from 
google.protobuf import json_format from google.api_core import operations_v1 + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index from google.longrunning import operations_pb2 # type: ignore -from .base import ( - DatastoreAdminTransport, - DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, -) + +from .rest_base import _BaseDatastoreAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -358,8 +354,8 @@ class DatastoreAdminRestStub: _interceptor: DatastoreAdminRestInterceptor -class DatastoreAdminRestTransport(DatastoreAdminTransport): - """REST backend transport for DatastoreAdmin. +class DatastoreAdminRestTransport(_BaseDatastoreAdminRestTransport): + """REST backend synchronous transport for DatastoreAdmin. Google Cloud Datastore Admin API @@ -421,7 +417,6 @@ class DatastoreAdminRestTransport(DatastoreAdminTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -475,21 +470,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -553,9 +539,34 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache.
return self._operations_client - class _CreateIndex(DatastoreAdminRestStub): + class _CreateIndex( + _BaseDatastoreAdminRestTransport._BaseCreateIndex, DatastoreAdminRestStub + ): def __hash__(self): - return hash("CreateIndex") + return hash("DatastoreAdminRestTransport.CreateIndex") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -585,44 +596,32 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}/indexes", - "body": "index", - }, - ] + http_options = ( + _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_http_options() + ) request, metadata = self._interceptor.pre_create_index(request, metadata) - pb_request = datastore_admin.CreateIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_query_params_json( + transcoded_request ) - query_params["$alt"] = "json;enum-encoding=int" - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreAdminRestTransport._CreateIndex._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -636,9 +635,33 @@ def __call__( resp = self._interceptor.post_create_index(resp) return resp - class _DeleteIndex(DatastoreAdminRestStub): + class _DeleteIndex( + _BaseDatastoreAdminRestTransport._BaseDeleteIndex, DatastoreAdminRestStub + ): def __hash__(self): - return hash("DeleteIndex") + return hash("DatastoreAdminRestTransport.DeleteIndex") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -668,37 +691,27 @@ def __call__( """ - http_options: 
List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/projects/{project_id}/indexes/{index_id}", - }, - ] + http_options = ( + _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_http_options() + ) request, metadata = self._interceptor.pre_delete_index(request, metadata) - pb_request = datastore_admin.DeleteIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_query_params_json( + transcoded_request ) - query_params["$alt"] = "json;enum-encoding=int" - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = DatastoreAdminRestTransport._DeleteIndex._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -712,19 +725,34 @@ def __call__( resp = self._interceptor.post_delete_index(resp) return resp - class _ExportEntities(DatastoreAdminRestStub): + class _ExportEntities( + _BaseDatastoreAdminRestTransport._BaseExportEntities, DatastoreAdminRestStub + ): def __hash__(self): - return hash("ExportEntities") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreAdminRestTransport.ExportEntities") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -754,45 +782,32 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:export", - "body": "*", - }, - ] + http_options = ( + _BaseDatastoreAdminRestTransport._BaseExportEntities._get_http_options() + ) request, metadata = self._interceptor.pre_export_entities(request, metadata) - pb_request = datastore_admin.ExportEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # 
Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreAdminRestTransport._ExportEntities._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -806,9 +821,33 @@ def __call__( resp = self._interceptor.post_export_entities(resp) return resp - class _GetIndex(DatastoreAdminRestStub): + class _GetIndex( + _BaseDatastoreAdminRestTransport._BaseGetIndex, DatastoreAdminRestStub + ): def __hash__(self): - return hash("GetIndex") + return hash("DatastoreAdminRestTransport.GetIndex") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -835,37 +874,31 @@ def __call__( Datastore composite index definition. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/projects/{project_id}/indexes/{index_id}", - }, - ] + http_options = ( + _BaseDatastoreAdminRestTransport._BaseGetIndex._get_http_options() + ) request, metadata = self._interceptor.pre_get_index(request, metadata) - pb_request = datastore_admin.GetIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseDatastoreAdminRestTransport._BaseGetIndex._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreAdminRestTransport._BaseGetIndex._get_query_params_json( + transcoded_request ) ) - query_params["$alt"] = "json;enum-encoding=int" - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = DatastoreAdminRestTransport._GetIndex._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -881,19 +914,34 @@ def __call__( resp = self._interceptor.post_get_index(resp) return resp - class _ImportEntities(DatastoreAdminRestStub): + class _ImportEntities( + _BaseDatastoreAdminRestTransport._BaseImportEntities, DatastoreAdminRestStub + ): def __hash__(self): - return hash("ImportEntities") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreAdminRestTransport.ImportEntities") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -923,45 +971,32 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:import", - "body": "*", - }, - ] + http_options = ( + _BaseDatastoreAdminRestTransport._BaseImportEntities._get_http_options() + ) request, metadata = self._interceptor.pre_import_entities(request, metadata) - pb_request = datastore_admin.ImportEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = 
transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreAdminRestTransport._ImportEntities._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -975,9 +1010,33 @@ def __call__( resp = self._interceptor.post_import_entities(resp) return resp - class _ListIndexes(DatastoreAdminRestStub): + class _ListIndexes( + _BaseDatastoreAdminRestTransport._BaseListIndexes, DatastoreAdminRestStub + ): def __hash__(self): - return hash("ListIndexes") + return hash("DatastoreAdminRestTransport.ListIndexes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1006,37 +1065,27 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/projects/{project_id}/indexes", - }, - ] + http_options = ( + _BaseDatastoreAdminRestTransport._BaseListIndexes._get_http_options() + ) request, metadata = self._interceptor.pre_list_indexes(request, metadata) - pb_request = datastore_admin.ListIndexesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreAdminRestTransport._BaseListIndexes._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreAdminRestTransport._BaseListIndexes._get_query_params_json( + transcoded_request ) - query_params["$alt"] = "json;enum-encoding=int" - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = DatastoreAdminRestTransport._ListIndexes._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1104,7 +1153,34 @@ def list_indexes( def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # 
type: ignore - class _CancelOperation(DatastoreAdminRestStub): + class _CancelOperation( + _BaseDatastoreAdminRestTransport._BaseCancelOperation, DatastoreAdminRestStub + ): + def __hash__(self): + return hash("DatastoreAdminRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.CancelOperationRequest, @@ -1125,34 +1201,29 @@ def __call__( sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/operations/*}:cancel", - }, - ] - + http_options = ( + _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_http_options() + ) request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreAdminRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1166,7 +1237,34 @@ def __call__( def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(DatastoreAdminRestStub): + class _DeleteOperation( + _BaseDatastoreAdminRestTransport._BaseDeleteOperation, DatastoreAdminRestStub + ): + def __hash__(self): + return hash("DatastoreAdminRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.DeleteOperationRequest, @@ -1187,34 +1285,29 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/operations/*}", - }, - ] - + http_options = ( + _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_http_options() + ) request, metadata = self._interceptor.pre_delete_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreAdminRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1228,7 +1321,34 @@ def __call__( def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(DatastoreAdminRestStub): + class _GetOperation( + _BaseDatastoreAdminRestTransport._BaseGetOperation, DatastoreAdminRestStub + ): + def __hash__(self): + return hash("DatastoreAdminRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.GetOperationRequest, @@ -1252,32 +1372,27 @@ def __call__( operations_pb2.Operation: Response from GetOperation method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/operations/*}", - }, - ] - + http_options = ( + _BaseDatastoreAdminRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreAdminRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseDatastoreAdminRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreAdminRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1285,8 +1400,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) return resp @@ -1294,7 +1410,34 @@ def __call__( def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(DatastoreAdminRestStub): + class _ListOperations( + _BaseDatastoreAdminRestTransport._BaseListOperations, DatastoreAdminRestStub + ): + def __hash__(self): + return hash("DatastoreAdminRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.ListOperationsRequest, @@ -1318,32 +1461,27 @@ def __call__( operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*}/operations", - }, - ] - + http_options = ( + _BaseDatastoreAdminRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreAdminRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseDatastoreAdminRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreAdminRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1351,8 +1489,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) return resp diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py new file mode 100644 index 000000000000..a94eece52156 --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.datastore_admin_v1.types import datastore_admin +from google.cloud.datastore_admin_v1.types import index +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseDatastoreAdminRestTransport(DatastoreAdminTransport): + """Base REST backend transport for DatastoreAdmin. + + Note: This class is not meant to be used directly. 
Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "datastore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'datastore.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateIndex: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/indexes", + "body": "index", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore_admin.CreateIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteIndex: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/projects/{project_id}/indexes/{index_id}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore_admin.DeleteIndexRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExportEntities: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:export", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore_admin.ExportEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreAdminRestTransport._BaseExportEntities._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIndex: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/indexes/{index_id}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore_admin.GetIndexRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseImportEntities: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:import", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore_admin.ImportEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreAdminRestTransport._BaseImportEntities._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListIndexes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/indexes", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore_admin.ListIndexesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseDatastoreAdminRestTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index b1855affcf3f..a417fe1ef035 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -34,6 +34,7 @@ from .types.datastore import Mutation from .types.datastore import MutationResult from .types.datastore import PropertyMask +from .types.datastore import PropertyTransform from .types.datastore import ReadOptions from .types.datastore import ReserveIdsRequest from .types.datastore import ReserveIdsResponse @@ -53,6 +54,7 @@ from .types.query import CompositeFilter from .types.query import EntityResult from .types.query import Filter +from .types.query import FindNearest from .types.query import GqlQuery from .types.query import GqlQueryParameter from .types.query import KindExpression @@ -87,6 +89,7 @@ "ExplainMetrics", "ExplainOptions", "Filter", + "FindNearest", "GqlQuery", "GqlQueryParameter", "Key", @@ -102,6 +105,7 @@ "PropertyMask", "PropertyOrder", "PropertyReference", + "PropertyTransform", "Query", "QueryResultBatch", "ReadOptions", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 2f0ba4f09766..fcef7a8bf8ce 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -360,13 +360,18 @@ async def sample_lookup(): # and friendly error handling. rpc = self._client._transport._wrapped_methods[self._client._transport.lookup] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -446,13 +451,18 @@ async def sample_run_query(): self._client._transport.run_query ] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -532,13 +542,18 @@ async def sample_run_aggregation_query(): self._client._transport.run_aggregation_query ] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -640,13 +655,18 @@ async def sample_begin_transaction(): self._client._transport.begin_transaction ] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -792,13 +812,18 @@ async def sample_commit(): # and friendly error handling. rpc = self._client._transport._wrapped_methods[self._client._transport.commit] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -911,13 +936,18 @@ async def sample_rollback(): # and friendly error handling. rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -1032,13 +1062,18 @@ async def sample_allocate_ids(): self._client._transport.allocate_ids ] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -1152,13 +1187,18 @@ async def sample_reserve_ids(): self._client._transport.reserve_ids ] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), - ) + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Validate the universe domain. self._client._validate_universe_domain() @@ -1205,11 +1245,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1262,11 +1298,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1323,11 +1355,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1380,11 +1408,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. 
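The datastore_v1 async_client.py hunks above change how the routing header is built: rather than always sending `project_id`, the client now adds each of `project_id` and `database_id` to the `x-goog-request-params` metadata only when the field is set. A self-contained sketch of that construction, with `gapic_v1.routing_header.to_grpc_metadata` approximated by a plain URL-encoded pair (an assumption about its output shape, not its actual implementation):

    from dataclasses import dataclass
    from urllib.parse import urlencode

    @dataclass
    class Request:  # illustrative stand-in for e.g. datastore.LookupRequest
        project_id: str = ""
        database_id: str = ""

    def with_routing_metadata(request, metadata=()):
        header_params = {}
        if request.project_id:
            header_params["project_id"] = request.project_id
        if request.database_id:
            header_params["database_id"] = request.database_id
        if header_params:
            # Only populated fields become routing params, so database_id is
            # now routed alongside project_id when present.
            metadata = tuple(metadata) + (
                ("x-goog-request-params", urlencode(header_params)),
            )
        return metadata

    print(with_routing_metadata(Request(project_id="my-proj", database_id="my-db")))
    # (('x-goog-request-params', 'project_id=my-proj&database_id=my-db'),)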
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 6717a09fd2ee..ea6fba235925 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1645,11 +1645,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1702,11 +1698,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1763,11 +1755,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1820,11 +1808,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/README.rst b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/README.rst new file mode 100644 index 000000000000..77f9e34e5dea --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DatastoreTransport` is the ABC for all transports. +- public child `DatastoreGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DatastoreGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDatastoreRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DatastoreRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
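The transport hunks that follow restructure the REST stack along the lines of the README above: shared per-method logic (HTTP options, request transcoding, body and query-param serialization) moves into `_BaseDatastoreRestTransport` and its `_BaseMETHOD` inner classes in the new `rest_base.py`, while the base and asyncio gRPC transports gain wrapped operations methods and the asyncio transport probes `google-api-core` for the newer `kind` keyword on `wrap_method`. A minimal sketch of that probe, assuming only the `kind` parameter differs between core versions:

    import inspect

    from google.api_core import gapic_v1

    # True when the installed google-api-core accepts wrap_method(..., kind=...).
    _WRAP_WITH_KIND = (
        "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
    )

    def wrap(func, **kwargs):
        # Forward the transport kind only on versions that understand it,
        # so the same generated code runs against both old and new cores.
        if _WRAP_WITH_KIND:
            kwargs["kind"] = "grpc_asyncio"
        return gapic_v1.method_async.wrap_method(func, **kwargs)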
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index db08f5b4da36..cb18d36925b3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -213,6 +213,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index b826d7c6bcf5..4d943696f1c7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -234,6 +235,9 @@ def __init__( ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) self._prep_wrapped_messages(client_info) @property @@ -470,7 +474,7 @@ def reserve_ids( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.lookup: gapic_v1.method_async.wrap_method( + self.lookup: self._wrap_method( self.lookup, default_retry=retries.AsyncRetry( initial=0.1, @@ -485,7 +489,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.run_query: gapic_v1.method_async.wrap_method( + self.run_query: self._wrap_method( self.run_query, default_retry=retries.AsyncRetry( initial=0.1, @@ -500,7 +504,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.run_aggregation_query: gapic_v1.method_async.wrap_method( + self.run_aggregation_query: self._wrap_method( self.run_aggregation_query, default_retry=retries.AsyncRetry( initial=0.1, @@ -515,27 +519,27 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.begin_transaction: gapic_v1.method_async.wrap_method( + self.begin_transaction: self._wrap_method( self.begin_transaction, default_timeout=60.0, client_info=client_info, ), - self.commit: gapic_v1.method_async.wrap_method( + self.commit: self._wrap_method( self.commit, default_timeout=60.0, client_info=client_info, ), - self.rollback: gapic_v1.method_async.wrap_method( + self.rollback: self._wrap_method( self.rollback, default_timeout=60.0, client_info=client_info, ), - self.allocate_ids: gapic_v1.method_async.wrap_method( + self.allocate_ids: self._wrap_method( self.allocate_ids, default_timeout=60.0, client_info=client_info, ), - self.reserve_ids: gapic_v1.method_async.wrap_method( + self.reserve_ids: self._wrap_method( self.reserve_ids, default_retry=retries.AsyncRetry( initial=0.1, @@ -550,11 +554,40 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def delete_operation( self, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index 245516187538..abb7d45c02a5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -16,33 +16,32 @@ from google.auth.transport.requests import AuthorizedSession # type: 
ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.datastore_v1.types import datastore from google.longrunning import operations_pb2 # type: ignore -from .base import DatastoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +from .rest_base import _BaseDatastoreRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -405,8 +404,8 @@ class DatastoreRestStub: _interceptor: DatastoreRestInterceptor -class DatastoreRestTransport(DatastoreTransport): - """REST backend transport for Datastore. +class DatastoreRestTransport(_BaseDatastoreRestTransport): + """REST backend synchronous transport for Datastore. Each RPC normalizes the partition IDs of the keys in its input entities, and always returns entities with keys with @@ -421,7 +420,6 @@ class DatastoreRestTransport(DatastoreTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -475,21 +473,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -500,19 +489,32 @@ def __init__( self._interceptor = interceptor or DatastoreRestInterceptor() self._prep_wrapped_messages(client_info) - class _AllocateIds(DatastoreRestStub): + class _AllocateIds(_BaseDatastoreRestTransport._BaseAllocateIds, DatastoreRestStub): def __hash__(self): - return hash("AllocateIds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.AllocateIds") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -541,45 +543,36 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:allocateIds", - "body": "*", - }, - ] + http_options = ( + _BaseDatastoreRestTransport._BaseAllocateIds._get_http_options() + ) request, metadata = self._interceptor.pre_allocate_ids(request, metadata) - pb_request = datastore.AllocateIdsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseDatastoreRestTransport._BaseAllocateIds._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseAllocateIds._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreRestTransport._BaseAllocateIds._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._AllocateIds._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error,
raise the appropriate core_exceptions.GoogleAPICallError exception @@ -595,19 +588,34 @@ def __call__( resp = self._interceptor.post_allocate_ids(resp) return resp - class _BeginTransaction(DatastoreRestStub): + class _BeginTransaction( + _BaseDatastoreRestTransport._BaseBeginTransaction, DatastoreRestStub + ): def __hash__(self): - return hash("BeginTransaction") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.BeginTransaction") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -636,47 +644,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:beginTransaction", - "body": "*", - }, - ] + http_options = ( + _BaseDatastoreRestTransport._BaseBeginTransaction._get_http_options() + ) request, metadata = self._interceptor.pre_begin_transaction( request, metadata ) - pb_request = datastore.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseDatastoreRestTransport._BaseBeginTransaction._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseBeginTransaction._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreRestTransport._BaseBeginTransaction._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._BeginTransaction._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -692,19 +687,32 @@ def __call__( resp = self._interceptor.post_begin_transaction(resp) return resp - class _Commit(DatastoreRestStub): + class _Commit(_BaseDatastoreRestTransport._BaseCommit, DatastoreRestStub): def __hash__(self): - return hash("Commit") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not 
in message_dict - } + return hash("DatastoreRestTransport.Commit") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -733,45 +741,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:commit", - "body": "*", - }, - ] + http_options = _BaseDatastoreRestTransport._BaseCommit._get_http_options() request, metadata = self._interceptor.pre_commit(request, metadata) - pb_request = datastore.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseDatastoreRestTransport._BaseCommit._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseCommit._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreRestTransport._BaseCommit._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._Commit._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -787,19 +784,32 @@ def __call__( resp = self._interceptor.post_commit(resp) return resp - class _Lookup(DatastoreRestStub): + class _Lookup(_BaseDatastoreRestTransport._BaseLookup, DatastoreRestStub): def __hash__(self): - return hash("Lookup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.Lookup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -828,45 +838,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - 
"method": "post", - "uri": "/v1/projects/{project_id}:lookup", - "body": "*", - }, - ] + http_options = _BaseDatastoreRestTransport._BaseLookup._get_http_options() request, metadata = self._interceptor.pre_lookup(request, metadata) - pb_request = datastore.LookupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseDatastoreRestTransport._BaseLookup._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseLookup._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreRestTransport._BaseLookup._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._Lookup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -882,19 +881,32 @@ def __call__( resp = self._interceptor.post_lookup(resp) return resp - class _ReserveIds(DatastoreRestStub): + class _ReserveIds(_BaseDatastoreRestTransport._BaseReserveIds, DatastoreRestStub): def __hash__(self): - return hash("ReserveIds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.ReserveIds") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -923,45 +935,36 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:reserveIds", - "body": "*", - }, - ] + http_options = ( + _BaseDatastoreRestTransport._BaseReserveIds._get_http_options() + ) request, metadata = self._interceptor.pre_reserve_ids(request, metadata) - pb_request = datastore.ReserveIdsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseDatastoreRestTransport._BaseReserveIds._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = 
_BaseDatastoreRestTransport._BaseReserveIds._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreRestTransport._BaseReserveIds._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._ReserveIds._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -977,19 +980,32 @@ def __call__( resp = self._interceptor.post_reserve_ids(resp) return resp - class _Rollback(DatastoreRestStub): + class _Rollback(_BaseDatastoreRestTransport._BaseRollback, DatastoreRestStub): def __hash__(self): - return hash("Rollback") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.Rollback") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1019,45 +1035,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:rollback", - "body": "*", - }, - ] + http_options = _BaseDatastoreRestTransport._BaseRollback._get_http_options() request, metadata = self._interceptor.pre_rollback(request, metadata) - pb_request = datastore.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseDatastoreRestTransport._BaseRollback._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseRollback._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreRestTransport._BaseRollback._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - 
response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._Rollback._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1073,19 +1078,34 @@ def __call__( resp = self._interceptor.post_rollback(resp) return resp - class _RunAggregationQuery(DatastoreRestStub): + class _RunAggregationQuery( + _BaseDatastoreRestTransport._BaseRunAggregationQuery, DatastoreRestStub + ): def __hash__(self): - return hash("RunAggregationQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.RunAggregationQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1114,47 +1134,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:runAggregationQuery", - "body": "*", - }, - ] + http_options = ( + _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_http_options() + ) request, metadata = self._interceptor.pre_run_aggregation_query( request, metadata ) - pb_request = datastore.RunAggregationQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._RunAggregationQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1170,19 +1177,32 @@ def __call__( 
resp = self._interceptor.post_run_aggregation_query(resp) return resp - class _RunQuery(DatastoreRestStub): + class _RunQuery(_BaseDatastoreRestTransport._BaseRunQuery, DatastoreRestStub): def __hash__(self): - return hash("RunQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("DatastoreRestTransport.RunQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1211,45 +1231,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/projects/{project_id}:runQuery", - "body": "*", - }, - ] + http_options = _BaseDatastoreRestTransport._BaseRunQuery._get_http_options() request, metadata = self._interceptor.pre_run_query(request, metadata) - pb_request = datastore.RunQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseDatastoreRestTransport._BaseRunQuery._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseDatastoreRestTransport._BaseRunQuery._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseDatastoreRestTransport._BaseRunQuery._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = DatastoreRestTransport._RunQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1333,7 +1342,34 @@ def run_query( def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(DatastoreRestStub): + class _CancelOperation( + _BaseDatastoreRestTransport._BaseCancelOperation, DatastoreRestStub + ): + def __hash__(self): + return hash("DatastoreRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.CancelOperationRequest, @@ -1354,34 +1390,31 @@ def __call__( sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/operations/*}:cancel", - }, - ] - + http_options = ( + _BaseDatastoreRestTransport._BaseCancelOperation._get_http_options() + ) request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseDatastoreRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1395,7 +1428,34 @@ def __call__( def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(DatastoreRestStub): + class _DeleteOperation( + _BaseDatastoreRestTransport._BaseDeleteOperation, DatastoreRestStub + ): + def __hash__(self): + return hash("DatastoreRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.DeleteOperationRequest, @@ -1416,34 +1476,31 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/operations/*}", - }, - ] - + http_options = ( + _BaseDatastoreRestTransport._BaseDeleteOperation._get_http_options() + ) request, metadata = self._interceptor.pre_delete_operation( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseDatastoreRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseDatastoreRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1457,7 +1514,34 @@ def __call__( def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(DatastoreRestStub): + class _GetOperation( + _BaseDatastoreRestTransport._BaseGetOperation, DatastoreRestStub + ): + def __hash__(self): + return hash("DatastoreRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.GetOperationRequest, @@ -1481,32 +1565,31 @@ def __call__( operations_pb2.Operation: Response from GetOperation method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/operations/*}", - }, - ] - + http_options = ( + _BaseDatastoreRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseDatastoreRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseDatastoreRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1514,8 +1597,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) return resp @@ -1523,7 +1607,34 @@ def __call__( def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(DatastoreRestStub): + class _ListOperations( + _BaseDatastoreRestTransport._BaseListOperations, DatastoreRestStub + ): + def __hash__(self): + return hash("DatastoreRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: operations_pb2.ListOperationsRequest, @@ -1547,32 +1658,31 @@ def __call__( operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*}/operations", - }, - ] - + http_options = ( + _BaseDatastoreRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseDatastoreRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseDatastoreRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = DatastoreRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1580,8 +1690,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) return resp diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py new file mode 100644 index 000000000000..c8d5c675af3a --- /dev/null +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py @@ -0,0 +1,650 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import DatastoreTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.datastore_v1.types import datastore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseDatastoreRestTransport(DatastoreTransport): + """Base REST backend transport for Datastore. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "datastore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'datastore.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseAllocateIds: + def __hash__(self):  # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:allocateIds", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.AllocateIdsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseAllocateIds._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBeginTransaction: + def __hash__(self):  # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str,
Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:beginTransaction", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.BeginTransactionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseBeginTransaction._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCommit: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:commit", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.CommitRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseCommit._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseLookup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:lookup", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.LookupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseLookup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseReserveIds: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:reserveIds", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.ReserveIdsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseReserveIds._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRollback: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:rollback", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.RollbackRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseRollback._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunAggregationQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, 
message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:runAggregationQuery", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.RunAggregationQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}:runQuery", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datastore.RunQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDatastoreRestTransport._BaseRunQuery._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + 
"method": "delete", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseDatastoreRestTransport",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index 3ae809b44e96..0efe33ff991b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -29,6 +29,7 @@ Mutation, MutationResult, PropertyMask, + PropertyTransform, ReadOptions, ReserveIdsRequest, ReserveIdsResponse, @@ -52,6 +53,7 @@ CompositeFilter, EntityResult, Filter, + FindNearest, GqlQuery, GqlQueryParameter, KindExpression, @@ -83,6 +85,7 @@ "Mutation", "MutationResult", "PropertyMask", + "PropertyTransform", "ReadOptions", "ReserveIdsRequest", "ReserveIdsResponse", @@ -102,6 +105,7 @@ "CompositeFilter", "EntityResult", "Filter", + "FindNearest", "GqlQuery", "GqlQueryParameter", "KindExpression", diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 11974c3d968c..281866f59efd 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -46,6 +46,7 @@ "ReserveIdsRequest", "ReserveIdsResponse", "Mutation", + "PropertyTransform", "MutationResult", "PropertyMask", "ReadOptions", @@ -796,6 +797,10 @@ class Mutation(proto.Message): mutation conflicts. This field is a member of `oneof`_ ``conflict_detection_strategy``. 
+ conflict_resolution_strategy (google.cloud.datastore_v1.types.Mutation.ConflictResolutionStrategy): + The strategy to use when a conflict is detected. Defaults to + ``SERVER_VALUE``. If this is set, then + ``conflict_detection_strategy`` must also be set. property_mask (google.cloud.datastore_v1.types.PropertyMask): The properties to write in this mutation. None of the properties in the mask may have a reserved name, except for @@ -804,8 +809,31 @@ class Mutation(proto.Message): If the entity already exists, only properties referenced in the mask are updated, others are left untouched. Properties referenced in the mask but not in the entity are deleted. + property_transforms (MutableSequence[google.cloud.datastore_v1.types.PropertyTransform]): + Optional. The transforms to perform on the entity. + + This field can be set only when the operation is ``insert``, + ``update``, or ``upsert``. If present, the transforms are + applied to the entity regardless of the property mask, in + order, after the operation. """ + + class ConflictResolutionStrategy(proto.Enum): + r"""The possible ways to resolve a conflict detected in a + mutation. + + Values: + STRATEGY_UNSPECIFIED (0): + Unspecified. Defaults to ``SERVER_VALUE``. + SERVER_VALUE (1): + The server entity is kept. + FAIL (3): + The whole commit request fails. + """ + STRATEGY_UNSPECIFIED = 0 + SERVER_VALUE = 1 + FAIL = 3 + insert: entity.Entity = proto.Field( proto.MESSAGE, number=4, @@ -841,11 +869,199 @@ class Mutation(proto.Message): oneof="conflict_detection_strategy", message=timestamp_pb2.Timestamp, ) + conflict_resolution_strategy: ConflictResolutionStrategy = proto.Field( + proto.ENUM, + number=10, + enum=ConflictResolutionStrategy, + ) property_mask: "PropertyMask" = proto.Field( proto.MESSAGE, number=9, message="PropertyMask", ) + property_transforms: MutableSequence["PropertyTransform"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="PropertyTransform", + ) + + +class PropertyTransform(proto.Message): + r"""A transformation of an entity property. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + property (str): + Optional. The name of the property. + + Property paths (a list of property names separated by dots + (``.``)) may be used to refer to properties inside entity + values. For example ``foo.bar`` means the property ``bar`` + inside the entity property ``foo``. + + If a property name contains a dot ``.`` or a backslash + ``\``, then that name must be escaped. + set_to_server_value (google.cloud.datastore_v1.types.PropertyTransform.ServerValue): + Sets the property to the given server value. + + This field is a member of `oneof`_ ``transform_type``. + increment (google.cloud.datastore_v1.types.Value): + Adds the given value to the property's + current value. + This must be an integer or a double value. + If the property is not an integer or double, or + if the property does not yet exist, the + transformation will set the property to the + given value. If either the given value or the + current property value is a double, both values + will be interpreted as doubles. Double + arithmetic and representation of double values + follow IEEE 754 semantics.
If there is + positive/negative integer overflow, the property + is resolved to the largest magnitude + positive/negative integer. + + This field is a member of `oneof`_ ``transform_type``. + maximum (google.cloud.datastore_v1.types.Value): + Sets the property to the maximum of its + current value and the given value. + + This must be an integer or a double value. + If the property is not an integer or double, or + if the property does not yet exist, the + transformation will set the property to the + given value. If a maximum operation is applied + where the property and the input value are of + mixed types (that is - one is an integer and one + is a double) the property takes on the type of + the larger operand. If the operands are + equivalent (e.g. 3 and 3.0), the property does + not change. 0, 0.0, and -0.0 are all zero. The + maximum of a zero stored value and zero input + value is always the stored value. + The maximum of any numeric value x and NaN is + NaN. + + This field is a member of `oneof`_ ``transform_type``. + minimum (google.cloud.datastore_v1.types.Value): + Sets the property to the minimum of its + current value and the given value. + + This must be an integer or a double value. + If the property is not an integer or double, or + if the property does not yet exist, the + transformation will set the property to the + input value. If a minimum operation is applied + where the property and the input value are of + mixed types (that is - one is an integer and one + is a double) the property takes on the type of + the smaller operand. If the operands are + equivalent (e.g. 3 and 3.0), the property does + not change. 0, 0.0, and -0.0 are all zero. The + minimum of a zero stored value and zero input + value is always the stored value. The minimum of + any numeric value x and NaN is NaN. + + This field is a member of `oneof`_ ``transform_type``. + append_missing_elements (google.cloud.datastore_v1.types.ArrayValue): + Appends the given elements in order if they + are not already present in the current property + value. If the property is not an array, or if + the property does not yet exist, it is first set + to the empty array. + + Equivalent numbers of different types (e.g. 3L + and 3.0) are considered equal when checking if a + value is missing. NaN is equal to NaN, and the + null value is equal to the null value. If the + input contains multiple equivalent values, only + the first will be considered. + + The corresponding transform result will be the + null value. + + This field is a member of `oneof`_ ``transform_type``. + remove_all_from_array (google.cloud.datastore_v1.types.ArrayValue): + Removes all of the given elements from the + array in the property. If the property is not an + array, or if the property does not yet exist, it + is set to the empty array. + + Equivalent numbers of different types (e.g. 3L + and 3.0) are considered equal when deciding + whether an element should be removed. NaN is + equal to NaN, and the null value is equal to the + null value. This will remove all equivalent + values if there are duplicates. + + The corresponding transform result will be the + null value. + + This field is a member of `oneof`_ ``transform_type``. + """ + + class ServerValue(proto.Enum): + r"""A value that is calculated by the server. + + Values: + SERVER_VALUE_UNSPECIFIED (0): + Unspecified. This value must not be used. + REQUEST_TIME (1): + The time at which the server processed the + request, with millisecond precision. 
If used on + multiple properties (same or different entities) + in a transaction, all the properties will get + the same server timestamp. + """ + SERVER_VALUE_UNSPECIFIED = 0 + REQUEST_TIME = 1 + + property: str = proto.Field( + proto.STRING, + number=1, + ) + set_to_server_value: ServerValue = proto.Field( + proto.ENUM, + number=2, + oneof="transform_type", + enum=ServerValue, + ) + increment: entity.Value = proto.Field( + proto.MESSAGE, + number=3, + oneof="transform_type", + message=entity.Value, + ) + maximum: entity.Value = proto.Field( + proto.MESSAGE, + number=4, + oneof="transform_type", + message=entity.Value, + ) + minimum: entity.Value = proto.Field( + proto.MESSAGE, + number=5, + oneof="transform_type", + message=entity.Value, + ) + append_missing_elements: entity.ArrayValue = proto.Field( + proto.MESSAGE, + number=6, + oneof="transform_type", + message=entity.ArrayValue, + ) + remove_all_from_array: entity.ArrayValue = proto.Field( + proto.MESSAGE, + number=7, + oneof="transform_type", + message=entity.ArrayValue, + ) class MutationResult(proto.Message): @@ -878,6 +1094,10 @@ class MutationResult(proto.Message): Whether a conflict was detected for this mutation. Always false when a conflict detection strategy field is not set in the mutation. + transform_results (MutableSequence[google.cloud.datastore_v1.types.Value]): + The results of applying each + [PropertyTransform][google.datastore.v1.PropertyTransform], + in the same order as in the request. """ key: entity.Key = proto.Field( @@ -903,6 +1123,11 @@ class MutationResult(proto.Message): proto.BOOL, number=5, ) + transform_results: MutableSequence[entity.Value] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=entity.Value, + ) class PropertyMask(proto.Message): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 2ce1000fe956..1f8679cc2317 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -37,6 +37,7 @@ "Filter", "CompositeFilter", "PropertyFilter", + "FindNearest", "GqlQuery", "GqlQueryParameter", "QueryResultBatch", @@ -132,6 +133,16 @@ class ResultType(proto.Enum): class Query(proto.Message): r"""A query for entities. + The query stages are executed in the following order: + + 1. kind + 2. filter + 3. projection + 4. order + start_cursor + end_cursor + 5. offset + 6. limit + 7. find_nearest + Attributes: projection (MutableSequence[google.cloud.datastore_v1.types.Projection]): The projection to return. Defaults to @@ -175,6 +186,13 @@ class Query(proto.Message): Applies after all other constraints. Optional. Unspecified is interpreted as no limit. Must be >= 0 if specified. + find_nearest (google.cloud.datastore_v1.types.FindNearest): + Optional. A nearest-neighbors search. + Applies after all other filters and ordering. + + Finds the closest vector embeddings to the given + query vector. """ projection: MutableSequence["Projection"] = proto.RepeatedField( @@ -219,6 +237,11 @@ class Query(proto.Message): number=12, message=wrappers_pb2.Int32Value, ) + find_nearest: "FindNearest" = proto.Field( + proto.MESSAGE, + number=13, + message="FindNearest", + ) class AggregationQuery(proto.Message): @@ -712,6 +735,110 @@ class Operator(proto.Enum): ) +class FindNearest(proto.Message): + r"""Nearest Neighbors search config.
The ordering provided by + FindNearest supersedes the order_by stage. If multiple documents + have the same vector distance, the returned document order is not + guaranteed to be stable between queries. + + Attributes: + vector_property (google.cloud.datastore_v1.types.PropertyReference): + Required. An indexed vector property to search upon. Only + documents which contain vectors whose dimensionality matches + the query_vector can be returned. + query_vector (google.cloud.datastore_v1.types.Value): + Required. The query vector that we are + searching on. Must be a vector of no more than + 2048 dimensions. + distance_measure (google.cloud.datastore_v1.types.FindNearest.DistanceMeasure): + Required. The distance measure to use. + limit (google.protobuf.wrappers_pb2.Int32Value): + Required. The number of nearest neighbors to + return. Must be a positive integer of no more + than 100. + distance_result_property (str): + Optional. The name of the property in which to output the + result of the vector distance calculation. Must conform to [entity + property][google.datastore.v1.Entity.properties] + limitations. + distance_threshold (google.protobuf.wrappers_pb2.DoubleValue): + Optional. A threshold on vector distance; documents less + similar than the threshold are not returned. The behavior of the + specified ``distance_measure`` will affect the meaning of + the distance threshold. Since DOT_PRODUCT distances increase + when the vectors are more similar, the comparison is + inverted. + + For EUCLIDEAN, COSINE: WHERE distance <= distance_threshold + For DOT_PRODUCT: WHERE distance >= distance_threshold + """ + + class DistanceMeasure(proto.Enum): + r"""The distance measure to use when comparing vectors. + + Values: + DISTANCE_MEASURE_UNSPECIFIED (0): + Should not be set. + EUCLIDEAN (1): + Measures the EUCLIDEAN distance between the vectors. See + `Euclidean `__ + to learn more. The resulting distance decreases the more + similar two vectors are. + COSINE (2): + COSINE distance compares vectors based on the angle between + them, which allows you to measure similarity that isn't + based on the vectors' magnitude. We recommend using + DOT_PRODUCT with unit normalized vectors instead of COSINE + distance, which is mathematically equivalent but has better + performance. See `Cosine + Similarity `__ + to learn more about COSINE similarity and COSINE distance. + The resulting COSINE distance decreases the more similar two + vectors are. + DOT_PRODUCT (3): + Similar to cosine but is affected by the magnitude of the + vectors. See `Dot + Product `__ to + learn more. The resulting distance increases the more + similar two vectors are. + """ + DISTANCE_MEASURE_UNSPECIFIED = 0 + EUCLIDEAN = 1 + COSINE = 2 + DOT_PRODUCT = 3 + + vector_property: "PropertyReference" = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + query_vector: gd_entity.Value = proto.Field( + proto.MESSAGE, + number=2, + message=gd_entity.Value, + ) + distance_measure: DistanceMeasure = proto.Field( + proto.ENUM, + number=3, + enum=DistanceMeasure, + ) + limit: wrappers_pb2.Int32Value = proto.Field( + proto.MESSAGE, + number=4, + message=wrappers_pb2.Int32Value, + ) + distance_result_property: str = proto.Field( + proto.STRING, + number=5, + ) + distance_threshold: wrappers_pb2.DoubleValue = proto.Field( + proto.MESSAGE, + number=6, + message=wrappers_pb2.DoubleValue, + ) + + class GqlQuery(proto.Message): r"""A `GQL query `__.
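The new messages above are plain proto-plus types and can be constructed directly. A minimal usage sketch, assuming a build that includes this change; the project ID, kind, property names, and vector literal below are hypothetical, and whether the service accepts such a query also depends on a vector index being configured:

    # Hedged sketch: server-side increment via the new PropertyTransform,
    # and a vector query via the new FindNearest. "my-project", "Doc",
    # "view_count", and "embedding" are hypothetical names.
    from google.cloud.datastore_v1 import types
    from google.protobuf import wrappers_pb2

    key = types.Key(
        partition_id=types.PartitionId(project_id="my-project"),
        path=[types.Key.PathElement(kind="Doc", name="doc-1")],
    )

    # property_transforms is only valid alongside insert/update/upsert;
    # here an upsert carries the key and the transform bumps a counter
    # without a client-side read-modify-write.
    mutation = types.Mutation(
        upsert=types.Entity(key=key),
        property_transforms=[
            types.PropertyTransform(
                property="view_count",
                increment=types.Value(integer_value=1),
            )
        ],
    )

    # find_nearest applies after all other query stages: keep the 5
    # entities whose "embedding" vectors are closest to the query vector,
    # and emit each distance into a "distance" result property.
    query = types.Query(
        kind=[types.KindExpression(name="Doc")],
        find_nearest=types.FindNearest(
            vector_property=types.PropertyReference(name="embedding"),
            query_vector=types.Value(
                array_value=types.ArrayValue(
                    values=[types.Value(double_value=v) for v in (0.1, 0.2, 0.3)]
                )
            ),
            distance_measure=types.FindNearest.DistanceMeasure.COSINE,
            limit=wrappers_pb2.Int32Value(value=5),
            distance_result_property="distance",
        ),
    )

The sketch only shows how the messages compose; it does not issue an RPC.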
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index 6501a012bee7..f880423cbf60 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -66,10 +73,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -1167,25 +1188,6 @@ def test_export_entities(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_export_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_entities), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.export_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() - - def test_export_entities_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1256,27 +1258,6 @@ def test_export_entities_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_export_entities_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.export_entities), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.export_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() - - @pytest.mark.asyncio async def test_export_entities_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1285,7 +1266,7 @@ async def test_export_entities_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1329,7 +1310,7 @@ async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1392,7 +1373,7 @@ def test_export_entities_field_headers(): @pytest.mark.asyncio async def test_export_entities_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1477,7 +1458,7 @@ def test_export_entities_flattened_error(): @pytest.mark.asyncio async def test_export_entities_flattened_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1518,7 +1499,7 @@ async def test_export_entities_flattened_async(): @pytest.mark.asyncio async def test_export_entities_flattened_error_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1566,25 +1547,6 @@ def test_import_entities(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_import_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_entities), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.import_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() - - def test_import_entities_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
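The ``use_cached_wrapped_rpc`` tests above and below all assert the same transport behavior: ``_prep_wrapped_messages`` builds each RPC wrapper once at client construction, and later calls reuse it from ``_transport._wrapped_methods``. A self-contained sketch of that caching pattern, with a stand-in for ``gapic_v1.method.wrap_method`` rather than the generated transport code:

    # Sketch of the wrapper caching the tests verify: wrap each RPC once
    # at construction time, then dispatch through the cached wrapper.
    class TransportSketch:
        def __init__(self):
            self.wrap_count = 0
            self._wrapped_methods = {
                "export_entities": self._wrap(self._export_entities),
            }

        def _wrap(self, fn):
            # Stand-in for gapic_v1.method.wrap_method; counts invocations
            # so the reuse is observable.
            self.wrap_count += 1
            return fn

        def _export_entities(self, request):
            return "operations/spam"

    transport = TransportSketch()
    transport._wrapped_methods["export_entities"]({})
    transport._wrapped_methods["export_entities"]({})
    assert transport.wrap_count == 1  # built once, reused on the second call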
@@ -1655,27 +1617,6 @@ def test_import_entities_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_import_entities_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_entities), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.import_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() - - @pytest.mark.asyncio async def test_import_entities_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1684,7 +1625,7 @@ async def test_import_entities_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1728,7 +1669,7 @@ async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1791,7 +1732,7 @@ def test_import_entities_field_headers(): @pytest.mark.asyncio async def test_import_entities_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1876,7 +1817,7 @@ def test_import_entities_flattened_error(): @pytest.mark.asyncio async def test_import_entities_flattened_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1917,7 +1858,7 @@ async def test_import_entities_flattened_async(): @pytest.mark.asyncio async def test_import_entities_flattened_error_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1965,25 +1906,6 @@ def test_create_index(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_create_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.CreateIndexRequest() - - def test_create_index_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2052,27 +1974,6 @@ def test_create_index_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.CreateIndexRequest() - - @pytest.mark.asyncio async def test_create_index_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2081,7 +1982,7 @@ async def test_create_index_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2125,7 +2026,7 @@ async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2188,7 +2089,7 @@ def test_create_index_field_headers(): @pytest.mark.asyncio async def test_create_index_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2250,25 +2151,6 @@ def test_delete_index(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_delete_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.DeleteIndexRequest() - - def test_delete_index_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
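The removed async empty-call tests faked awaitable gRPC responses with ``grpc_helpers_async.FakeUnaryUnaryCall``, and the surviving async tests keep the same technique. A condensed sketch of the idea, using a simplified stand-in class rather than the real ``google.api_core`` implementation:

    import asyncio
    from unittest import mock

    class AwaitableStub:
        # Simplified stand-in for grpc_helpers_async.FakeUnaryUnaryCall:
        # wraps a canned response so the object can be awaited.
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()

    async def main():
        # Patching a transport method with a Mock that returns an awaitable
        # mirrors how the tests fake unary-unary gRPC calls.
        call = mock.Mock(return_value=AwaitableStub("operations/spam"))
        assert await call("request") == "operations/spam"

    asyncio.run(main())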
@@ -2339,27 +2221,6 @@ def test_delete_index_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.delete_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.DeleteIndexRequest() - - @pytest.mark.asyncio async def test_delete_index_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2368,7 +2229,7 @@ async def test_delete_index_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2412,7 +2273,7 @@ async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2476,7 +2337,7 @@ def test_delete_index_field_headers(): @pytest.mark.asyncio async def test_delete_index_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2550,25 +2411,6 @@ def test_get_index(request_type, transport: str = "grpc"): assert response.state == index.Index.State.CREATING -def test_get_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() - - def test_get_index_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2634,40 +2476,13 @@ def test_get_index_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - index.Index( - project_id="project_id_value", - index_id="index_id_value", - kind="kind_value", - ancestor=index.Index.AncestorMode.NONE, - state=index.Index.State.CREATING, - ) - ) - response = await client.get_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.GetIndexRequest() - - @pytest.mark.asyncio async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2706,7 +2521,7 @@ async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2781,7 +2596,7 @@ def test_get_index_field_headers(): @pytest.mark.asyncio async def test_get_index_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2845,25 +2660,6 @@ def test_list_indexes(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_indexes_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_indexes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() - - def test_list_indexes_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2931,29 +2727,6 @@ def test_list_indexes_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_indexes_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_indexes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ListIndexesRequest() - - @pytest.mark.asyncio async def test_list_indexes_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2962,7 +2735,7 @@ async def test_list_indexes_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3001,7 +2774,7 @@ async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3067,7 +2840,7 @@ def test_list_indexes_field_headers(): @pytest.mark.asyncio async def test_list_indexes_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3194,7 +2967,7 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3244,7 +3017,7 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3290,41 +3063,6 @@ async def test_list_indexes_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - datastore_admin.ExportEntitiesRequest, - dict, - ], -) -def test_export_entities_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.export_entities(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - def test_export_entities_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3458,89 +3196,6 @@ def test_export_entities_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_entities_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatastoreAdminRestInterceptor(), - ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatastoreAdminRestInterceptor, "post_export_entities" - ) as post, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "pre_export_entities" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.ExportEntitiesRequest.pb( - datastore_admin.ExportEntitiesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = datastore_admin.ExportEntitiesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_entities( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_export_entities_rest_bad_request( - transport: str = "rest", request_type=datastore_admin.ExportEntitiesRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_entities(request) - - def test_export_entities_rest_flattened(): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3600,47 +3255,6 @@ def test_export_entities_rest_flattened_error(transport: str = "rest"): ) -def test_export_entities_rest_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore_admin.ImportEntitiesRequest, - dict, - ], -) -def test_import_entities_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.import_entities(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - def test_import_entities_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3774,89 +3388,6 @@ def test_import_entities_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_entities_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatastoreAdminRestInterceptor(), - ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatastoreAdminRestInterceptor, "post_import_entities" - ) as post, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "pre_import_entities" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.ImportEntitiesRequest.pb( - datastore_admin.ImportEntitiesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = datastore_admin.ImportEntitiesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = 
operations_pb2.Operation() - - client.import_entities( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_import_entities_rest_bad_request( - transport: str = "rest", request_type=datastore_admin.ImportEntitiesRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.import_entities(request) - - def test_import_entities_rest_flattened(): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3916,123 +3447,47 @@ def test_import_entities_rest_flattened_error(transport: str = "rest"): ) -def test_import_entities_rest_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_create_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "request_type", - [ - datastore_admin.CreateIndexRequest, - dict, - ], -) -def test_create_index_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Ensure method has been cached + assert client._transport.create_index in client._transport._wrapped_methods - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request_init["index"] = { - "project_id": "project_id_value", - "index_id": "index_id_value", - "kind": "kind_value", - "ancestor": 1, - "properties": [{"name": "name_value", "direction": 1}], - "state": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_index] = mock_rpc - # Determine if the message type is proto-plus or protobuf - test_field = datastore_admin.CreateIndexRequest.meta.fields["index"] + request = {} + client.create_index(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["index"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["index"][field])): - del request_init["index"][field][i][subfield] - else: - del request_init["index"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_index(request) + client.create_index(request) - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_create_index_rest_use_cached_wrapped_rpc(): +def test_delete_index_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4046,17 +3501,17 @@ def test_create_index_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_index in client._transport._wrapped_methods + assert client._transport.delete_index in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_index] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc request = {} - client.create_index(request) + client.delete_index(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -4065,138 +3520,50 @@ def test_create_index_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_index(request) + client.delete_index(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_index_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatastoreAdminRestInterceptor(), - ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatastoreAdminRestInterceptor, "post_create_index" - ) as post, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "pre_create_index" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.CreateIndexRequest.pb( - datastore_admin.CreateIndexRequest() +def test_get_index_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request = datastore_admin.CreateIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = 
operations_pb2.Operation() + # Ensure method has been cached + assert client._transport.get_index in client._transport._wrapped_methods - client.create_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.get_index] = mock_rpc - pre.assert_called_once() - post.assert_called_once() - - -def test_create_index_rest_bad_request( - transport: str = "rest", request_type=datastore_admin.CreateIndexRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_index(request) - - -def test_create_index_rest_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore_admin.DeleteIndexRequest, - dict, - ], -) -def test_delete_index_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1", "index_id": "sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + request = {} + client.get_index(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_index(request) + client.get_index(request) - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_delete_index_rest_use_cached_wrapped_rpc(): +def test_list_indexes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4210,70 +3577,1012 @@ def test_delete_index_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_index in client._transport._wrapped_methods + assert client._transport.list_indexes in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc + client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc request = {} - client.delete_index(request) + client.list_indexes(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_index(request) + client.list_indexes(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_index_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( +def test_list_indexes_rest_pager(transport: str = "rest"): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatastoreAdminRestInterceptor(), + transport=transport, ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatastoreAdminRestInterceptor, "post_delete_index" - ) as post, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "pre_delete_index" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.DeleteIndexRequest.pb( - datastore_admin.DeleteIndexRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
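+        # (A transcode mock appears unnecessary: the pager below is driven
+        # entirely by the mocked HTTP responses.)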
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], + next_page_token="abc", + ), + datastore_admin.ListIndexesResponse( + indexes=[], + next_page_token="def", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datastore_admin.ListIndexesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1"} + + pager = client.list_indexes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, index.Index) for i in results) + + pages = list(client.list_indexes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatastoreAdminClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
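+    # Both the sync and async gRPC transports should expose a usable channel.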
+ transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastoreAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreAdminGrpcTransport, + transports.DatastoreAdminGrpcAsyncIOTransport, + transports.DatastoreAdminRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DatastoreAdminClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_entities_empty_call_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ExportEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_entities_empty_call_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ImportEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_index_empty_call_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.CreateIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
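+# With request=None the client is expected to fall back to an empty
+# DeleteIndexRequest, which the assertion on args[0] verifies.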
+def test_delete_index_empty_call_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.DeleteIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_index_empty_call_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + call.return_value = index.Index() + client.get_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.GetIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_indexes_empty_call_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + call.return_value = datastore_admin.ListIndexesResponse() + client.list_indexes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ListIndexesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DatastoreAdminAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_entities_empty_call_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.export_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ExportEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
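+# The async client should substitute the same empty request message as the
+# sync client does in the equivalent test above.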
+@pytest.mark.asyncio +async def test_import_entities_empty_call_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.import_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ImportEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_index_empty_call_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.CreateIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_index_empty_call_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.DeleteIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_index_empty_call_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + ) + await client.get_index(request=None) + + # Establish that the underlying stub method was called. 
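+    # mock_calls[0] holds the positional arguments forwarded to the stub.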
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.GetIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_indexes_empty_call_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_indexes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ListIndexesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DatastoreAdminClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_export_entities_rest_bad_request( + request_type=datastore_admin.ExportEntitiesRequest, +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.export_entities(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ExportEntitiesRequest, + dict, + ], +) +def test_export_entities_rest_call_success(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_entities(request) + + # Establish that the response is the type that we expect. 
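+    # (Assumption: as in the removed test above, the LRO payload keeps the
+    # mocked operation name through the JSON round trip.)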
+    assert response.operation.name == "operations/spam"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_export_entities_rest_interceptors(null_interceptor):
+    transport = transports.DatastoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DatastoreAdminRestInterceptor(),
+    )
+    client = DatastoreAdminClient(transport=transport)
+
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.DatastoreAdminRestInterceptor, "post_export_entities"
+    ) as post, mock.patch.object(
+        transports.DatastoreAdminRestInterceptor, "pre_export_entities"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datastore_admin.ExportEntitiesRequest.pb(
+            datastore_admin.ExportEntitiesRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = datastore_admin.ExportEntitiesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.export_entities(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_import_entities_rest_bad_request(
+    request_type=datastore_admin.ImportEntitiesRequest,
+):
+    client = DatastoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ""
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        client.import_entities(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datastore_admin.ImportEntitiesRequest,
+        dict,
+    ],
+)
+def test_import_entities_rest_call_success(request_type):
+    client = DatastoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.import_entities(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_import_entities_rest_interceptors(null_interceptor):
+    transport = transports.DatastoreAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DatastoreAdminRestInterceptor(),
+    )
+    client = DatastoreAdminClient(transport=transport)
+
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.DatastoreAdminRestInterceptor, "post_import_entities"
+    ) as post, mock.patch.object(
+        transports.DatastoreAdminRestInterceptor, "pre_import_entities"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datastore_admin.ImportEntitiesRequest.pb(
+            datastore_admin.ImportEntitiesRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = datastore_admin.ImportEntitiesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.import_entities(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_index_rest_bad_request(request_type=datastore_admin.CreateIndexRequest):
+    client = DatastoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
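+    # A 400 status from the mocked session should surface as
+    # core_exceptions.BadRequest through the transport's error mapping.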
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_index(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.CreateIndexRequest, + dict, + ], +) +def test_create_index_rest_call_success(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["index"] = { + "project_id": "project_id_value", + "index_id": "index_id_value", + "kind": "kind_value", + "ancestor": 1, + "properties": [{"name": "name_value", "direction": 1}], + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = datastore_admin.CreateIndexRequest.meta.fields["index"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["index"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["index"][field])): + del request_init["index"][field][i][subfield] + else: + del 
request_init["index"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_index(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_index_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), + ) + client = DatastoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_create_index" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_create_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.CreateIndexRequest.pb( + datastore_admin.CreateIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = datastore_admin.CreateIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_index_rest_bad_request(request_type=datastore_admin.DeleteIndexRequest): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_index(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.DeleteIndexRequest, + dict, + ], +) +def test_delete_index_rest_call_success(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_index(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_index_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), + ) + client = DatastoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_delete_index" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_delete_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.DeleteIndexRequest.pb( + datastore_admin.DeleteIndexRequest() ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value request = datastore_admin.DeleteIndexRequest() metadata = [ @@ -4283,7 +4592,132 @@ def test_delete_index_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_index( + client.delete_index( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_index_rest_bad_request(request_type=datastore_admin.GetIndexRequest): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) 
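+    # Only the routing fields are needed here; the HTTP call itself is
+    # mocked below and fails immediately.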
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_index(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.GetIndexRequest, + dict, + ], +) +def test_get_index_rest_call_success(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "index_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = index.Index( + project_id="project_id_value", + index_id="index_id_value", + kind="kind_value", + ancestor=index.Index.AncestorMode.NONE, + state=index.Index.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_index(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + assert response.project_id == "project_id_value" + assert response.index_id == "index_id_value" + assert response.kind == "kind_value" + assert response.ancestor == index.Index.AncestorMode.NONE + assert response.state == index.Index.State.CREATING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_index_rest_interceptors(null_interceptor): + transport = transports.DatastoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastoreAdminRestInterceptor(), + ) + client = DatastoreAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_get_index" + ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "pre_get_index" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore_admin.GetIndexRequest.pb( + datastore_admin.GetIndexRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = index.Index.to_json(index.Index()) + req.return_value.content = return_value + + request = datastore_admin.GetIndexRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = index.Index() + + client.get_index( request, metadata=[ ("key", "val"), @@ -4295,16 +4729,12 @@ def test_delete_index_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_delete_index_rest_bad_request( - transport: str = "rest", request_type=datastore_admin.DeleteIndexRequest -): +def test_list_indexes_rest_bad_request(request_type=datastore_admin.ListIndexesRequest): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1", "index_id": "sample2"} + request_init = {"project_id": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4312,105 +4742,56 @@ def test_delete_index_rest_bad_request( core_exceptions.BadRequest ): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value - client.delete_index(request) - - -def test_delete_index_rest_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.list_indexes(request) @pytest.mark.parametrize( "request_type", [ - datastore_admin.GetIndexRequest, + datastore_admin.ListIndexesRequest, dict, ], ) -def test_get_index_rest(request_type): +def test_list_indexes_rest_call_success(request_type): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"project_id": "sample1", "index_id": "sample2"} + request_init = {"project_id": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = index.Index( - project_id="project_id_value", - index_id="index_id_value", - kind="kind_value", - ancestor=index.Index.AncestorMode.NONE, - state=index.Index.State.CREATING, + return_value = datastore_admin.ListIndexesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 + # Convert return value to protobuf type - return_value = index.Index.pb(return_value) + return_value = datastore_admin.ListIndexesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_index(request) + response = client.list_indexes(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, index.Index) - assert response.project_id == "project_id_value" - assert response.index_id == "index_id_value" - assert response.kind == "kind_value" - assert response.ancestor == index.Index.AncestorMode.NONE - assert response.state == index.Index.State.CREATING - - -def test_get_index_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_index] = mock_rpc - - request = {} - client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert isinstance(response, pagers.ListIndexesPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_index_rest_interceptors(null_interceptor): +def test_list_indexes_rest_interceptors(null_interceptor): transport = transports.DatastoreAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4418,19 +4799,20 @@ def test_get_index_rest_interceptors(null_interceptor): else transports.DatastoreAdminRestInterceptor(), ) client = DatastoreAdminClient(transport=transport) + with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "post_get_index" + transports.DatastoreAdminRestInterceptor, "post_list_indexes" ) as post, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "pre_get_index" + transports.DatastoreAdminRestInterceptor, "pre_list_indexes" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = datastore_admin.GetIndexRequest.pb( - datastore_admin.GetIndexRequest() + pb_message = datastore_admin.ListIndexesRequest.pb( + datastore_admin.ListIndexesRequest() ) transcode.return_value = { "method": "post", @@ -4439,20 +4821,22 @@ def test_get_index_rest_interceptors(null_interceptor): "query_params": pb_message, } - req.return_value = Response() + req.return_value = mock.Mock() req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = index.Index.to_json(index.Index()) + return_value = datastore_admin.ListIndexesResponse.to_json( + datastore_admin.ListIndexesResponse() + ) + req.return_value.content = return_value - request = datastore_admin.GetIndexRequest() + request = datastore_admin.ListIndexesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = index.Index() + post.return_value = datastore_admin.ListIndexesResponse() - client.get_index( + 
client.list_indexes( request, metadata=[ ("key", "val"), @@ -4464,17 +4848,17 @@ def test_get_index_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_index_rest_bad_request( - transport: str = "rest", request_type=datastore_admin.GetIndexRequest +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, ): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1", "index_id": "sample2"} - request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, "request") as req, pytest.raises( @@ -4482,162 +4866,177 @@ def test_get_index_rest_bad_request( ): # Wrap the value into a proper Response obj response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_index(request) - - -def test_get_index_rest_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.cancel_operation(request) @pytest.mark.parametrize( "request_type", [ - datastore_admin.ListIndexesRequest, + operations_pb2.CancelOperationRequest, dict, ], ) -def test_list_indexes_rest(request_type): +def test_cancel_operation_rest(request_type): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} + request_init = {"name": "projects/sample1/operations/sample2"} request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = datastore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") - response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_indexes(request) + + response = client.cancel_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_indexes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) - # Ensure method has been cached - assert client._transport.list_indexes in client._transport._wrapped_methods - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - request = {} - client.list_indexes(request) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") - client.list_indexes(request) + req.return_value = response_value - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response = client.delete_operation(request) + # Establish that the response is the type that we expect. 
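+    # DeleteOperation has an empty response body, so the client returns None.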
+ assert response is None -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_indexes_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatastoreAdminRestInterceptor(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "post_list_indexes" - ) as post, mock.patch.object( - transports.DatastoreAdminRestInterceptor, "pre_list_indexes" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.ListIndexesRequest.pb( - datastore_admin.ListIndexesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore_admin.ListIndexesResponse.to_json( - datastore_admin.ListIndexesResponse() - ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - request = datastore_admin.ListIndexesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore_admin.ListIndexesResponse() + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") - client.list_indexes( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + req.return_value = response_value - pre.assert_called_once() - post.assert_called_once() + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) -def test_list_indexes_rest_bad_request( - transport: str = "rest", request_type=datastore_admin.ListIndexesRequest +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, ): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, "request") as req, pytest.raises( @@ -4645,179 +5044,190 @@ def test_list_indexes_rest_bad_request( ): # Wrap the value into a proper Response obj response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_indexes(request) + client.list_operations(request) -def test_list_indexes_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - datastore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token="abc", - ), - datastore_admin.ListIndexesResponse( - indexes=[], - next_page_token="def", - ), - datastore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token="ghi", - ), - datastore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - datastore_admin.ListIndexesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() - sample_request = {"project_id": "sample1"} + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") - pager = client.list_indexes(request=sample_request) + req.return_value = response_value - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, index.Index) for i in results) + response = client.list_operations(request) - pages = list(client.list_indexes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +def test_initialize_client_w_rest(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + assert client is not None - # It is an error to provide a credentials file and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_entities_empty_call_rest(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_entities), "__call__") as call: + client.export_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ExportEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_entities_empty_call_rest(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_entities), "__call__") as call: + client.import_entities(request=None) - # It is an error to provide scopes and a transport instance. 
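# --- editor's sketch: the *_empty_call_rest failsafe shape ---
# The new empty-call tests here and in the following hunks all check the
# same thing: calling a method with request=None still hands the transport
# stub a default-constructed request message. Generalized illustratively
# (the helper name `_assert_empty_call` is ours):
from unittest import mock


def _assert_empty_call(client, rpc_name, expected_request):
    with mock.patch.object(
        type(getattr(client.transport, rpc_name)), "__call__"
    ) as call:
        getattr(client, rpc_name)(request=None)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == expected_request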
- transport = transports.DatastoreAdminGrpcTransport( + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ImportEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_index_empty_call_rest(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_index), "__call__") as call: + client.create_index(request=None) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreAdminGrpcTransport( + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.CreateIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_index_empty_call_rest(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = DatastoreAdminClient(transport=transport) - assert client.transport is transport + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_index), "__call__") as call: + client.delete_index(request=None) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreAdminGrpcTransport( + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.DeleteIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_index_empty_call_rest(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.DatastoreAdminGrpcAsyncIOTransport( + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_index), "__call__") as call: + client.get_index(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.GetIndexRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_indexes_empty_call_rest(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: + client.list_indexes(request=None) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - transports.DatastoreAdminRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore_admin.ListIndexesRequest() + + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = DatastoreAdminClient.get_transport_class(transport_name)( +def test_datastore_admin_rest_lro_client(): + client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, ) - assert transport.kind == transport_name + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client def test_transport_grpc_default(): @@ -5075,23 +5485,6 @@ def test_datastore_admin_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_datastore_admin_rest_lro_client(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - @pytest.mark.parametrize( "transport_name", [ @@ -5332,377 +5725,133 @@ def test_datastore_admin_grpc_lro_async_client(): ) # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = DatastoreAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DatastoreAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = DatastoreAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DatastoreAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatastoreAdminClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = DatastoreAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DatastoreAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = DatastoreAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DatastoreAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = DatastoreAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DatastoreAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.DatastoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.DatastoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = DatastoreAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/operations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/operations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/operations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) + assert transport.operations_client is transport.operations_client -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - request_init = {"name": "projects/sample1/operations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" + actual = DatastoreAdminClient.common_billing_account_path(billing_account) + assert expected == actual - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_operation(request) +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DatastoreAdminClient.common_billing_account_path(**expected) - # Establish that the response is the type that we expect. 
- assert response is None + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = DatastoreAdminClient.common_folder_path(folder) + assert expected == actual - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/operations/sample2"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DatastoreAdminClient.common_folder_path(**expected) + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/operations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DatastoreAdminClient.common_organization_path(organization) + assert expected == actual - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_operation(request) +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DatastoreAdminClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = DatastoreAdminClient.parse_common_organization_path(path) + assert expected == actual -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = DatastoreAdminClient.common_project_path(project) + assert expected == actual - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DatastoreAdminClient.common_project_path(**expected) + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() + actual = DatastoreAdminClient.common_location_path(project, location) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DatastoreAdminClient.common_location_path(**expected) - response = client.list_operations(request) + # Check that the path construction is reversible. + actual = DatastoreAdminClient.parse_common_location_path(path) + assert expected == actual - # Establish that the response is the type that we expect. 
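# --- editor's sketch: round-tripping the common path helpers ---
# The relocated path-helper tests above always pair construction with
# parsing. A minimal concrete round trip; the values are arbitrary, and the
# import path is an assumption based on the admin GAPIC package name, since
# the test module's own imports live outside this excerpt.
from google.cloud.datastore_admin_v1 import DatastoreAdminClient

path = DatastoreAdminClient.common_location_path("winkle", "nautilus")
assert path == "projects/winkle/locations/nautilus"
assert DatastoreAdminClient.parse_common_location_path(path) == {
    "project": "winkle",
    "location": "nautilus",
}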
- assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DatastoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DatastoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DatastoreAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -5732,7 +5881,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5785,7 +5934,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5830,7 +5979,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -5871,7 +6020,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5924,7 +6073,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5969,7 +6118,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -6010,7 +6159,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6065,7 +6214,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6112,7 +6261,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -6155,7 +6304,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6210,7 +6359,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6257,7 +6406,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -6273,22 +6422,41 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DatastoreAdminAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index aa1af5253efa..8a28ba749b81 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -63,10 +70,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. 
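Reviewer note: the hunks above split the monolithic close test into one test per transport, and the new async_anonymous_credentials() shim lets async clients fall back to sync anonymous credentials when google.auth.aio is unavailable. The per-transport close checks share one shape; a condensed sketch, with the helper name ours and the client construction elided:

from unittest import mock


def _assert_close_on_exit(client, channel_attr):
    # Exiting the client context must close the underlying channel/session
    # exactly once, and not before.
    with mock.patch.object(
        type(getattr(client.transport, channel_attr)), "close"
    ) as close:
        with client:
            close.assert_not_called()
        close.assert_called_once()


# e.g. _assert_close_on_exit(grpc_client, "_grpc_channel")
#      _assert_close_on_exit(rest_client, "_session")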
@@ -1122,25 +1143,6 @@ def test_lookup(request_type, transport: str = "grpc"): assert response.transaction == b"transaction_blob" -def test_lookup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lookup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() - - def test_lookup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1206,36 +1208,13 @@ def test_lookup_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lookup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.LookupResponse( - transaction=b"transaction_blob", - ) - ) - response = await client.lookup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() - - @pytest.mark.asyncio async def test_lookup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1274,7 +1253,7 @@ async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1308,47 +1287,6 @@ async def test_lookup_async_from_dict(): await test_lookup_async(request_type=dict) -def test_lookup_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.LookupRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lookup), "__call__") as call: - call.return_value = datastore.LookupResponse() - client.lookup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. 
- assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.LookupRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lookup), "__call__") as call: - call.return_value = datastore.LookupResponse() - client.lookup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - def test_lookup_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1416,7 +1354,7 @@ def test_lookup_flattened_error(): @pytest.mark.asyncio async def test_lookup_flattened_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1463,7 +1401,7 @@ async def test_lookup_flattened_async(): @pytest.mark.asyncio async def test_lookup_flattened_error_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1519,25 +1457,6 @@ def test_run_query(request_type, transport: str = "grpc"): assert response.transaction == b"transaction_blob" -def test_run_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() - - def test_run_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1603,36 +1522,13 @@ def test_run_query_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_run_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RunQueryResponse( - transaction=b"transaction_blob", - ) - ) - response = await client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() - - @pytest.mark.asyncio async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1671,7 +1567,7 @@ async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1705,47 +1601,6 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -def test_run_query_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RunQueryRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value = datastore.RunQueryResponse() - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RunQueryRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value = datastore.RunQueryResponse() - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - @pytest.mark.parametrize( "request_type", [ @@ -1784,27 +1639,6 @@ def test_run_aggregation_query(request_type, transport: str = "grpc"): assert response.transaction == b"transaction_blob" -def test_run_aggregation_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() - - def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1877,31 +1711,6 @@ def test_run_aggregation_query_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_run_aggregation_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RunAggregationQueryResponse( - transaction=b"transaction_blob", - ) - ) - response = await client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() - - @pytest.mark.asyncio async def test_run_aggregation_query_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1910,7 +1719,7 @@ async def test_run_aggregation_query_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1949,7 +1758,7 @@ async def test_run_aggregation_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunAggregationQueryRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1985,51 +1794,6 @@ async def test_run_aggregation_query_async_from_dict(): await test_run_aggregation_query_async(request_type=dict) -def test_run_aggregation_query_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value = datastore.RunAggregationQueryResponse() - client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value = datastore.RunAggregationQueryResponse() - client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - @pytest.mark.parametrize( "request_type", [ @@ -2068,27 +1832,6 @@ def test_begin_transaction(request_type, transport: str = "grpc"): assert response.transaction == b"transaction_blob" -def test_begin_transaction_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() - - def test_begin_transaction_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2159,41 +1902,16 @@ def test_begin_transaction_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_begin_transaction_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - ) - response = await client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() - - -@pytest.mark.asyncio -async def test_begin_transaction_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +async def test_begin_transaction_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -2230,7 +1948,7 @@ async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2266,51 +1984,6 @@ async def test_begin_transaction_async_from_dict(): await test_begin_transaction_async(request_type=dict) -def test_begin_transaction_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.BeginTransactionRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value = datastore.BeginTransactionResponse() - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.BeginTransactionRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value = datastore.BeginTransactionResponse() - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. 
- assert kw["metadata"] - - def test_begin_transaction_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2354,7 +2027,7 @@ def test_begin_transaction_flattened_error(): @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2385,7 +2058,7 @@ async def test_begin_transaction_flattened_async(): @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2433,25 +2106,6 @@ def test_commit(request_type, transport: str = "grpc"): assert response.index_updates == 1389 -def test_commit_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() - - def test_commit_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2517,36 +2171,13 @@ def test_commit_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_commit_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.CommitResponse( - index_updates=1389, - ) - ) - response = await client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.CommitRequest() - - @pytest.mark.asyncio async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2585,7 +2216,7 @@ async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2619,47 +2250,6 @@ async def test_commit_async_from_dict(): await test_commit_async(request_type=dict) -def test_commit_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.CommitRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value = datastore.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.CommitRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value = datastore.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - def test_commit_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2742,7 +2332,7 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
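Throughout these hunks the async client fixtures swap ga_credentials.AnonymousCredentials() for async_anonymous_credentials(). That helper is defined near the top of this test module rather than in any hunk shown here; a representative definition, taken from the generated GAPIC test templates and hedged accordingly (it may not be byte-identical to this module, and it assumes google-auth may or may not ship the google.auth.aio package), looks like this:

from google.auth import credentials as ga_credentials

try:
    # Available in newer google-auth releases with async support.
    from google.auth.aio import credentials as ga_credentials_async

    HAS_GOOGLE_AUTH_AIO = True
except ImportError:
    HAS_GOOGLE_AUTH_AIO = False


def async_anonymous_credentials():
    # Representative sketch, not necessarily this module's exact code.
    # Prefer native async anonymous credentials when google.auth.aio
    # exists; otherwise fall back to the sync anonymous credentials,
    # which the async clients also accept.
    if HAS_GOOGLE_AUTH_AIO:
        return ga_credentials_async.AnonymousCredentials()
    return ga_credentials.AnonymousCredentials()

The fallback keeps the suite runnable against older google-auth versions while still exercising the async credential path where it is available.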
@@ -2799,7 +2389,7 @@ async def test_commit_flattened_async(): @pytest.mark.asyncio async def test_commit_flattened_error_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2857,25 +2447,6 @@ def test_rollback(request_type, transport: str = "grpc"): assert isinstance(response, datastore.RollbackResponse) -def test_rollback_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() - - def test_rollback_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2941,34 +2512,13 @@ def test_rollback_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_rollback_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.RollbackResponse() - ) - response = await client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RollbackRequest() - - @pytest.mark.asyncio async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3007,7 +2557,7 @@ async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3038,47 +2588,6 @@ async def test_rollback_async_from_dict(): await test_rollback_async(request_type=dict) -def test_rollback_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RollbackRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value = datastore.RollbackResponse() - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RollbackRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value = datastore.RollbackResponse() - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - def test_rollback_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3125,7 +2634,7 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3158,7 +2667,7 @@ async def test_rollback_flattened_async(): @pytest.mark.asyncio async def test_rollback_flattened_error_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3204,25 +2713,6 @@ def test_allocate_ids(request_type, transport: str = "grpc"): assert isinstance(response, datastore.AllocateIdsResponse) -def test_allocate_ids_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.allocate_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() - - def test_allocate_ids_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3288,27 +2778,6 @@ def test_allocate_ids_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_allocate_ids_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.AllocateIdsResponse() - ) - response = await client.allocate_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.AllocateIdsRequest() - - @pytest.mark.asyncio async def test_allocate_ids_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3317,7 +2786,7 @@ async def test_allocate_ids_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3356,7 +2825,7 @@ async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3387,69 +2856,28 @@ async def test_allocate_ids_async_from_dict(): await test_allocate_ids_async(request_type=dict) -def test_allocate_ids_routing_parameters(): +def test_allocate_ids_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.AllocateIdsRequest(**{"project_id": "sample1"}) - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. call.return_value = datastore.AllocateIdsResponse() - client.allocate_ids(request) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.allocate_ids( + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) + ], + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.AllocateIdsRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: - call.return_value = datastore.AllocateIdsResponse() - client.allocate_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - -def test_allocate_ids_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.AllocateIdsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.allocate_ids( - project_id="project_id_value", - keys=[ - entity.Key( - partition_id=entity.PartitionId(project_id="project_id_value") - ) - ], - ) - - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].project_id @@ -3484,7 +2912,7 @@ def test_allocate_ids_flattened_error(): @pytest.mark.asyncio async def test_allocate_ids_flattened_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3523,7 +2951,7 @@ async def test_allocate_ids_flattened_async(): @pytest.mark.asyncio async def test_allocate_ids_flattened_error_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3573,25 +3001,6 @@ def test_reserve_ids(request_type, transport: str = "grpc"): assert isinstance(response, datastore.ReserveIdsResponse) -def test_reserve_ids_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.reserve_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() - - def test_reserve_ids_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3657,27 +3066,6 @@ def test_reserve_ids_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_reserve_ids_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.ReserveIdsResponse() - ) - response = await client.reserve_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() - - @pytest.mark.asyncio async def test_reserve_ids_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3686,7 +3074,7 @@ async def test_reserve_ids_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3725,7 +3113,7 @@ async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3756,47 +3144,6 @@ async def test_reserve_ids_async_from_dict(): await test_reserve_ids_async(request_type=dict) -def test_reserve_ids_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.ReserveIdsRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: - call.return_value = datastore.ReserveIdsResponse() - client.reserve_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.ReserveIdsRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: - call.return_value = datastore.ReserveIdsResponse() - client.reserve_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw["metadata"] - - def test_reserve_ids_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3853,7 +3200,7 @@ def test_reserve_ids_flattened_error(): @pytest.mark.asyncio async def test_reserve_ids_flattened_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
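The gRPC tests retained above and below all lean on one stub-mocking idiom: patch __call__ on the transport's method object so nothing crosses the wire, then assert on the request the stub captured. A minimal self-contained sketch of that idiom, reusing the imports this module already has (the test name is invented for illustration and does not appear in the patch):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.datastore_v1.services.datastore import DatastoreClient
from google.cloud.datastore_v1.types import datastore


def test_reserve_ids_stub_mock_sketch():
    # Illustrative only; not part of this patch.
    client = DatastoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    request = datastore.ReserveIdsRequest(project_id="sample1")

    # Patch the gRPC stub callable so the RPC never leaves the process.
    with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
        call.return_value = datastore.ReserveIdsResponse()
        response = client.reserve_ids(request)

    # The stub saw exactly the request object we supplied, once.
    assert len(call.mock_calls) == 1
    _, args, _ = call.mock_calls[0]
    assert args[0] == request
    assert isinstance(response, datastore.ReserveIdsResponse)

The flattened-field variants in the surrounding hunks follow the same shape, differing only in that they pass keyword arguments and compare individual request fields instead of the whole message.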
@@ -3892,7 +3239,7 @@ async def test_reserve_ids_flattened_async(): @pytest.mark.asyncio async def test_reserve_ids_flattened_error_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3909,46 +3256,6 @@ async def test_reserve_ids_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - datastore.LookupRequest, - dict, - ], -) -def test_lookup_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.LookupResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.LookupResponse) - assert response.transaction == b"transaction_blob" - - def test_lookup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4075,83 +3382,6 @@ def test_lookup_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_lookup" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_lookup" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.LookupRequest.pb(datastore.LookupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.LookupResponse.to_json( - datastore.LookupResponse() - ) - - request = datastore.LookupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.LookupResponse() - - client.lookup( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_rest_bad_request( - transport: str = "rest", request_type=datastore.LookupRequest -): - client = DatastoreClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup(request) - - def test_lookup_rest_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4223,74 +3453,28 @@ def test_lookup_rest_flattened_error(transport: str = "rest"): ) -def test_lookup_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_run_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "request_type", - [ - datastore.RunQueryRequest, - dict, - ], -) -def test_run_query_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Ensure method has been cached + assert client._transport.run_query in client._transport._wrapped_methods - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.RunQueryResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.run_query(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RunQueryResponse) - assert response.transaction == b"transaction_blob" - - -def test_run_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.run_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.run_query] = mock_rpc + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_query] = mock_rpc request = {} client.run_query(request) @@ -4387,129 +3571,6 @@ def test_run_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("projectId",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_query_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_run_query" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_run_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.RunQueryResponse.to_json( - datastore.RunQueryResponse() - ) - - request = datastore.RunQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.RunQueryResponse() - - client.run_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_query_rest_bad_request( - transport: str = "rest", request_type=datastore.RunQueryRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_query(request) - - -def test_run_query_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore.RunAggregationQueryRequest, - dict, - ], -) -def test_run_aggregation_query_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = datastore.RunAggregationQueryResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.run_aggregation_query(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RunAggregationQueryResponse) - assert response.transaction == b"transaction_blob" - - def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4635,173 +3696,48 @@ def test_run_aggregation_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("projectId",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_aggregation_query_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_run_aggregation_query" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_run_aggregation_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.RunAggregationQueryRequest.pb( - datastore.RunAggregationQueryRequest() +def test_begin_transaction_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.RunAggregationQueryResponse.to_json( - datastore.RunAggregationQueryResponse() - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request = datastore.RunAggregationQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.RunAggregationQueryResponse() + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods - client.run_aggregation_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) + client._transport._wrapped_methods[ + client._transport.begin_transaction + ] = mock_rpc - pre.assert_called_once() - post.assert_called_once() + request = {} + client.begin_transaction(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_run_aggregation_query_rest_bad_request( - transport: str = "rest", request_type=datastore.RunAggregationQueryRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + client.begin_transaction(request) - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_aggregation_query(request) - -def test_run_aggregation_query_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore.BeginTransactionRequest, - dict, - ], -) -def test_begin_transaction_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.begin_transaction(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" - - -def test_begin_transaction_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.begin_transaction - ] = mock_rpc - - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_begin_transaction_rest_required_fields( - request_type=datastore.BeginTransactionRequest, -): - transport_class = transports.DatastoreRestTransport +def test_begin_transaction_rest_required_fields( + request_type=datastore.BeginTransactionRequest, +): + transport_class = transports.DatastoreRestTransport request_init = {} request_init["project_id"] = "" @@ -4882,85 +3818,6 @@ def test_begin_transaction_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("projectId",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_begin_transaction_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_begin_transaction" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_begin_transaction" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.BeginTransactionRequest.pb( - datastore.BeginTransactionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.BeginTransactionResponse.to_json( - datastore.BeginTransactionResponse() - ) - - request = datastore.BeginTransactionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.BeginTransactionResponse() - - client.begin_transaction( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_begin_transaction_rest_bad_request( - transport: str = "rest", request_type=datastore.BeginTransactionRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.begin_transaction(request) - - def test_begin_transaction_rest_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5017,52 +3874,6 @@ def test_begin_transaction_rest_flattened_error(transport: str = "rest"): ) -def test_begin_transaction_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore.CommitRequest, - dict, - ], -) -def test_commit_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.CommitResponse( - index_updates=1389, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.commit(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.CommitResponse) - assert response.index_updates == 1389 - - def test_commit_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5181,96 +3992,19 @@ def test_commit_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("projectId",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( +def test_commit_rest_flattened(): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + transport="rest", ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_commit" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_commit" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.CommitRequest.pb(datastore.CommitRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.CommitResponse.to_json( - datastore.CommitResponse() - ) - request = datastore.CommitRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.CommitResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.CommitResponse() - client.commit( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_commit_rest_bad_request( - transport: str = "rest", request_type=datastore.CommitRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.commit(request) - - -def test_commit_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = datastore.CommitResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"project_id": "sample1"} + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} # get truthy value for each flattened field mock_args = dict( @@ -5338,49 +4072,6 @@ def test_commit_rest_flattened_error(transport: str = "rest"): ) -def test_commit_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore.RollbackRequest, - dict, - ], -) -def test_rollback_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.RollbackResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.rollback(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RollbackResponse) - - def test_rollback_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5511,83 +4202,6 @@ def test_rollback_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_rollback" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_rollback" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.RollbackResponse.to_json( - datastore.RollbackResponse() - ) - - request = datastore.RollbackRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.RollbackResponse() - - client.rollback( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_rollback_rest_bad_request( - transport: str = "rest", 
request_type=datastore.RollbackRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.rollback(request) - - def test_rollback_rest_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5645,49 +4259,6 @@ def test_rollback_rest_flattened_error(transport: str = "rest"): ) -def test_rollback_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore.AllocateIdsRequest, - dict, - ], -) -def test_allocate_ids_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.AllocateIdsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.allocate_ids(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.AllocateIdsResponse) - - def test_allocate_ids_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5814,107 +4385,30 @@ def test_allocate_ids_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_allocate_ids_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( +def test_allocate_ids_rest_flattened(): + client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + transport="rest", ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_allocate_ids" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_allocate_ids" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.AllocateIdsResponse.to_json( - datastore.AllocateIdsResponse() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.AllocateIdsResponse() - request = datastore.AllocateIdsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.AllocateIdsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} - client.allocate_ids( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + keys=[ + entity.Key( + partition_id=entity.PartitionId(project_id="project_id_value") + ) ], ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_allocate_ids_rest_bad_request( - transport: str = "rest", request_type=datastore.AllocateIdsRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.allocate_ids(request) - - -def test_allocate_ids_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.AllocateIdsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"project_id": "sample1"} - - # get truthy value for each flattened field - mock_args = dict( - project_id="project_id_value", - keys=[ - entity.Key( - partition_id=entity.PartitionId(project_id="project_id_value") - ) - ], - ) - mock_args.update(sample_request) + mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() @@ -5956,49 +4450,6 @@ def test_allocate_ids_rest_flattened_error(transport: str = "rest"): ) -def test_allocate_ids_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - datastore.ReserveIdsRequest, - dict, - ], -) -def test_reserve_ids_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = datastore.ReserveIdsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.ReserveIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.reserve_ids(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.ReserveIdsResponse) - - def test_reserve_ids_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6125,83 +4576,6 @@ def test_reserve_ids_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reserve_ids_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.DatastoreRestInterceptor, "post_reserve_ids" - ) as post, mock.patch.object( - transports.DatastoreRestInterceptor, "pre_reserve_ids" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.ReserveIdsResponse.to_json( - datastore.ReserveIdsResponse() - ) - - request = datastore.ReserveIdsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.ReserveIdsResponse() - - client.reserve_ids( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reserve_ids_rest_bad_request( - transport: str = "rest", request_type=datastore.ReserveIdsRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_id": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reserve_ids(request) - - def test_reserve_ids_rest_flattened(): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6267,12 +4641,6 @@ def test_reserve_ids_rest_flattened_error(transport: str = "rest"): ) -def test_reserve_ids_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.DatastoreGrpcTransport( @@ -6342,41 +4710,3012 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel - transport = transports.DatastoreGrpcAsyncIOTransport( + transport = transports.DatastoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + transports.DatastoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DatastoreClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value = datastore.LookupResponse() + client.lookup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.LookupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_query_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value = datastore.RunQueryResponse() + client.run_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RunQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_aggregation_query_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + call.return_value = datastore.RunAggregationQueryResponse() + client.run_aggregation_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
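+# (Each failsafe test patches the transport's bound RPC callable directly, so
+# the call is intercepted before any network traffic; the assertion then checks
+# that a default-constructed request proto reached the stub.)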
+def test_begin_transaction_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + call.return_value = datastore.BeginTransactionResponse() + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value = datastore.CommitResponse() + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value = datastore.RollbackResponse() + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_allocate_ids_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value = datastore.AllocateIdsResponse() + client.allocate_ids(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reserve_ids_empty_call_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value = datastore.ReserveIdsResponse() + client.reserve_ids(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest() + + assert args[0] == request_msg + + +def test_lookup_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value = datastore.LookupResponse() + client.lookup(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.LookupRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_lookup_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + call.return_value = datastore.LookupResponse() + client.lookup(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.LookupRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_query_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value = datastore.RunQueryResponse() + client.run_query(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunQueryRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_query_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + call.return_value = datastore.RunQueryResponse() + client.run_query(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunQueryRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_aggregation_query_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
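+    # (The routing tests check that explicit routing fields such as project_id
+    # are propagated into gRPC metadata; to_grpc_metadata() renders them as a
+    # single "x-goog-request-params" entry, e.g. "project_id=sample1".)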
+ with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + call.return_value = datastore.RunAggregationQueryResponse() + client.run_aggregation_query(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_aggregation_query_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + call.return_value = datastore.RunAggregationQueryResponse() + client.run_aggregation_query(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_begin_transaction_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + call.return_value = datastore.BeginTransactionResponse() + client.begin_transaction(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_begin_transaction_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + call.return_value = datastore.BeginTransactionResponse() + client.begin_transaction(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_commit_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value = datastore.CommitResponse() + client.commit(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. 
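+        # (kw holds the keyword arguments of the mocked stub call, so the
+        # expected routing header can be asserted by membership in kw["metadata"].)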
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.CommitRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_commit_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + call.return_value = datastore.CommitResponse() + client.commit(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.CommitRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_rollback_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value = datastore.RollbackResponse() + client.rollback(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RollbackRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_rollback_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + call.return_value = datastore.RollbackResponse() + client.rollback(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RollbackRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_allocate_ids_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value = datastore.AllocateIdsResponse() + client.allocate_ids(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_allocate_ids_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + call.return_value = datastore.AllocateIdsResponse() + client.allocate_ids(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_reserve_ids_routing_parameters_request_1_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value = datastore.ReserveIdsResponse() + client.reserve_ids(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_reserve_ids_routing_parameters_request_2_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + call.return_value = datastore.ReserveIdsResponse() + client.reserve_ids(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_transport_kind_grpc_asyncio(): + transport = DatastoreAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_lookup_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. 
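+        # (FakeUnaryUnaryCall wraps the response in an awaitable, matching the
+        # grpc.aio unary-unary call interface expected by the async client.)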
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse( + transaction=b"transaction_blob", + ) + ) + await client.lookup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.LookupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_query_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + ) + await client.run_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RunQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_aggregation_query_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + ) + await client.run_aggregation_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + ) + await client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse( + index_updates=1389, + ) + ) + await client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_rollback_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + await client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_allocate_ids_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + await client.allocate_ids(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_reserve_ids_empty_call_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + await client.reserve_ids(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest() + + assert args[0] == request_msg + + +@pytest.mark.asyncio +async def test_lookup_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. 
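+        # (Same FakeUnaryUnaryCall wrapper as in the empty-call tests above; the
+        # async routing variants differ mainly in awaiting the client call.)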
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse( + transaction=b"transaction_blob", + ) + ) + await client.lookup(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.LookupRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_lookup_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.LookupResponse( + transaction=b"transaction_blob", + ) + ) + await client.lookup(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.LookupRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_run_query_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + ) + await client.run_query(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunQueryRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_run_query_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + ) + await client.run_query(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunQueryRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_run_aggregation_query_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + ) + await client.run_aggregation_query(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_run_aggregation_query_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + ) + await client.run_aggregation_query(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_begin_transaction_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + ) + await client.begin_transaction(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_begin_transaction_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + ) + await client.begin_transaction(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_commit_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse( + index_updates=1389, + ) + ) + await client.commit(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.CommitRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_commit_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.CommitResponse( + index_updates=1389, + ) + ) + await client.commit(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.CommitRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_rollback_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + await client.rollback(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RollbackRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_rollback_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.RollbackResponse() + ) + await client.rollback(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RollbackRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_allocate_ids_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + await client.allocate_ids(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_allocate_ids_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.AllocateIdsResponse() + ) + await client.allocate_ids(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_reserve_ids_routing_parameters_request_1_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + await client.reserve_ids(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_reserve_ids_routing_parameters_request_2_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datastore.ReserveIdsResponse() + ) + await client.reserve_ids(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_transport_kind_rest(): + transport = DatastoreClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_lookup_rest_bad_request(request_type=datastore.LookupRequest): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
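+    # (A mocked 400 status is sufficient here: the REST transport maps HTTP
+    # error codes onto google.api_core exceptions, so the client raises
+    # BadRequest without any real request being sent.)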
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.lookup(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.LookupRequest, + dict, + ], +) +def test_lookup_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.LookupResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.LookupResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.LookupResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_lookup" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_lookup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.LookupRequest.pb(datastore.LookupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.LookupResponse.to_json(datastore.LookupResponse()) + req.return_value.content = return_value + + request = datastore.LookupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.LookupResponse() + + client.lookup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_query_rest_bad_request(request_type=datastore.RunQueryRequest): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.run_query(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunQueryRequest, + dict, + ], +) +def test_run_query_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.RunQueryResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_query(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunQueryResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_query_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_query" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_run_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.RunQueryResponse.to_json(datastore.RunQueryResponse()) + req.return_value.content = return_value + + request = datastore.RunQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.RunQueryResponse() + + client.run_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_aggregation_query_rest_bad_request( + request_type=datastore.RunAggregationQueryRequest, +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake 
a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.run_aggregation_query(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunAggregationQueryRequest, + dict, + ], +) +def test_run_aggregation_query_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.RunAggregationQueryResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_aggregation_query(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RunAggregationQueryResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_aggregation_query_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_aggregation_query" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_run_aggregation_query" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.RunAggregationQueryRequest.pb( + datastore.RunAggregationQueryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.RunAggregationQueryResponse.to_json( + datastore.RunAggregationQueryResponse() + ) + req.return_value.content = return_value + + request = datastore.RunAggregationQueryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.RunAggregationQueryResponse() + + client.run_aggregation_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_begin_transaction_rest_bad_request( + request_type=datastore.BeginTransactionRequest, +): + client = DatastoreClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.begin_transaction(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.BeginTransactionRequest, + dict, + ], +) +def test_begin_transaction_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.begin_transaction(request) + + # Establish that the response is the type that we expect. 
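+    # (The pb()/MessageToJson round trip above mirrors the wire format used by
+    # the REST transport, so the field assertion below also exercises response
+    # deserialization.)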
+ assert isinstance(response, datastore.BeginTransactionResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_begin_transaction_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_begin_transaction" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_begin_transaction" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.BeginTransactionRequest.pb( + datastore.BeginTransactionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.BeginTransactionResponse.to_json( + datastore.BeginTransactionResponse() + ) + req.return_value.content = return_value + + request = datastore.BeginTransactionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.BeginTransactionResponse() + + client.begin_transaction( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_commit_rest_bad_request(request_type=datastore.CommitRequest): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.commit(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.CommitRequest, + dict, + ], +) +def test_commit_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
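+        # (A non-default scalar is used so the JSON round trip is observable;
+        # index_updates=0 would be indistinguishable from an empty response.)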
+ return_value = datastore.CommitResponse( + index_updates=1389, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.commit(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.CommitResponse) + assert response.index_updates == 1389 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_commit" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_commit" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.CommitRequest.pb(datastore.CommitRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.CommitResponse.to_json(datastore.CommitResponse()) + req.return_value.content = return_value + + request = datastore.CommitRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.CommitResponse() + + client.commit( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_rest_bad_request(request_type=datastore.RollbackRequest): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.rollback(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.RollbackRequest, + dict, + ], +) +def test_rollback_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
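+        # RollbackResponse is an empty message, so only its type can be asserted.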
+ return_value = datastore.RollbackResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.RollbackResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rollback(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.RollbackResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_rollback" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_rollback" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.RollbackResponse.to_json(datastore.RollbackResponse()) + req.return_value.content = return_value + + request = datastore.RollbackRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.RollbackResponse() + + client.rollback( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_allocate_ids_rest_bad_request(request_type=datastore.AllocateIdsRequest): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.allocate_ids(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.AllocateIdsRequest, + dict, + ], +) +def test_allocate_ids_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
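+        # A default AllocateIdsResponse (its keys field left unset) is enough
+        # to exercise serialization in both directions.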
+ return_value = datastore.AllocateIdsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.AllocateIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.allocate_ids(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.AllocateIdsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_allocate_ids_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_allocate_ids" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_allocate_ids" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.AllocateIdsResponse.to_json( + datastore.AllocateIdsResponse() + ) + req.return_value.content = return_value + + request = datastore.AllocateIdsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.AllocateIdsResponse() + + client.allocate_ids( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reserve_ids_rest_bad_request(request_type=datastore.ReserveIdsRequest): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.reserve_ids(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datastore.ReserveIdsRequest, + dict, + ], +) +def test_reserve_ids_rest_call_success(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
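+        # ReserveIdsResponse, like RollbackResponse, defines no fields at all.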
+ return_value = datastore.ReserveIdsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datastore.ReserveIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reserve_ids(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastore.ReserveIdsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reserve_ids_rest_interceptors(null_interceptor): + transport = transports.DatastoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), + ) + client = DatastoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastoreRestInterceptor, "post_reserve_ids" + ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "pre_reserve_ids" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = datastore.ReserveIdsResponse.to_json( + datastore.ReserveIdsResponse() + ) + req.return_value.content = return_value + + request = datastore.ReserveIdsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastore.ReserveIdsResponse() + + client.reserve_ids( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
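+        # CancelOperation maps to google.protobuf.Empty on the wire; the
+        # client surfaces that to the caller as a plain None.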
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
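+        # get_operation returns the raw long-running operations_pb2.Operation,
+        # not a Datastore message type.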
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + client.lookup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.LookupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_query_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
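+    # A bare client.run_query(request=None) must be promoted to a default
+    # RunQueryRequest before it reaches the transport.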
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RunQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_aggregation_query_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_begin_transaction_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_allocate_ids_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: + client.allocate_ids(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.AllocateIdsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reserve_ids_empty_call_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: + client.reserve_ids(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest() + + assert args[0] == request_msg + + +def test_lookup_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + client.lookup(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.LookupRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_lookup_routing_parameters_request_2_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup), "__call__") as call: + client.lookup(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.LookupRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_query_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunQueryRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_query_routing_parameters_request_2_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_query), "__call__") as call: + client.run_query(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. 
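+        # (to_grpc_metadata below folds expected_headers into the single
+        # "x-goog-request-params" metadata entry that the backend routes on.)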
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunQueryRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_aggregation_query_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_run_aggregation_query_routing_parameters_request_2_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), "__call__" + ) as call: + client.run_aggregation_query(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_begin_transaction_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_begin_transaction_routing_parameters_request_2_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), "__call__" + ) as call: + client.begin_transaction(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.BeginTransactionRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_commit_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.CommitRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_commit_routing_parameters_request_2_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit), "__call__") as call: + client.commit(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.CommitRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_rollback_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback(request={"project_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RollbackRequest(**{"project_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_rollback_routing_parameters_request_2_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback), "__call__") as call: + client.rollback(request={"database_id": "sample1"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.RollbackRequest(**{"database_id": "sample1"}) + + assert args[0] == request_msg + + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_allocate_ids_routing_parameters_request_1_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
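+    # Each RPC is probed twice: once routing on project_id and once on
+    # database_id, the two fields named in the service's routing annotation.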
+    with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+        client.allocate_ids(request={"project_id": "sample1"})
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, kw = call.mock_calls[0]
+        request_msg = datastore.AllocateIdsRequest(**{"project_id": "sample1"})
+
+        assert args[0] == request_msg
+
+        expected_headers = {"project_id": "sample1"}
+        assert (
+            gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+        )
+
+
+def test_allocate_ids_routing_parameters_request_2_rest():
+    client = DatastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call:
+        client.allocate_ids(request={"database_id": "sample1"})
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, kw = call.mock_calls[0]
+        request_msg = datastore.AllocateIdsRequest(**{"database_id": "sample1"})
+
+        assert args[0] == request_msg
+
+        expected_headers = {"database_id": "sample1"}
+        assert (
+            gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+        )
+
+
+def test_reserve_ids_routing_parameters_request_1_rest():
+    client = DatastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+        client.reserve_ids(request={"project_id": "sample1"})
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, kw = call.mock_calls[0]
+        request_msg = datastore.ReserveIdsRequest(**{"project_id": "sample1"})
+
+        assert args[0] == request_msg
+
+        expected_headers = {"project_id": "sample1"}
+        assert (
+            gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"]
+        )
+
+
+def test_reserve_ids_routing_parameters_request_2_rest():
+    client = DatastoreClient(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    channel = transport.grpc_channel
-    assert channel

+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call:
+        client.reserve_ids(request={"database_id": "sample1"})

-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.DatastoreGrpcTransport,
-        transports.DatastoreGrpcAsyncIOTransport,
-        transports.DatastoreRestTransport,
-    ],
-)
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, "default") as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
+        # Establish that the underlying stub method was called.
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = datastore.ReserveIdsRequest(**{"database_id": "sample1"}) + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = DatastoreClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + expected_headers = {"database_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) def test_transport_grpc_default(): @@ -6832,377 +8171,133 @@ def test_datastore_transport_channel_mtls_with_adc(transport_class): ("grpc.max_receive_message_length", -1), ], ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = DatastoreClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DatastoreClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = DatastoreClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DatastoreClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = DatastoreClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DatastoreClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = DatastoreClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DatastoreClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = DatastoreClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DatastoreClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatastoreClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.DatastoreTransport, "_prep_wrapped_messages" - ) as prep: - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.DatastoreTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = DatastoreClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/operations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/operations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/operations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) + assert transport.grpc_channel == mock_grpc_channel -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - request_init = {"name": "projects/sample1/operations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" + actual = DatastoreClient.common_billing_account_path(billing_account) + assert expected == actual - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_operation(request) +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DatastoreClient.common_billing_account_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = DatastoreClient.common_folder_path(folder) + assert expected == actual - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/operations/sample2"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DatastoreClient.common_folder_path(**expected) + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/operations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DatastoreClient.common_organization_path(organization) + assert expected == actual - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_operation(request) +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DatastoreClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_organization_path(path) + assert expected == actual -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = DatastoreClient.common_project_path(project) + assert expected == actual - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DatastoreClient.common_project_path(**expected) + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() + actual = DatastoreClient.common_location_path(project, location) + assert expected == actual - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DatastoreClient.common_location_path(**expected) - response = client.list_operations(request) + # Check that the path construction is reversible. + actual = DatastoreClient.parse_common_location_path(path) + assert expected == actual - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DatastoreTransport, "_prep_wrapped_messages" + ) as prep: + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DatastoreTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DatastoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -7232,7 +8327,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7285,7 +8380,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7330,7 +8425,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -7371,7 +8466,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7424,7 +8519,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7469,7 +8564,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -7510,7 +8605,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7565,7 +8660,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7612,7 +8707,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -7655,7 +8750,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7710,7 +8805,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7757,7 +8852,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -7773,22 +8868,41 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DatastoreAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): From 07a3f50293ab59c288f001f6a5e9568b6cdea07d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 13:23:46 -0800 Subject: [PATCH 591/611] chore(python): update dependencies in .kokoro/docker/docs (#574) * chore(python): update dependencies in .kokoro/docker/docs Source-Link: https://github.com/googleapis/synthtool/commit/59171c8f83f3522ce186e4d110d27e772da4ba7a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 * Add constraints file for python 3.13 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/release-trigger.yml | 1 + .../.github/sync-repo-settings.yaml | 1 + .../.github/workflows/unittest.yml | 2 +- .../.kokoro/docker/docs/requirements.txt | 56 +++++++++---------- .../.kokoro/docs/common.cfg | 2 +- .../.kokoro/samples/python3.13/common.cfg | 40 +++++++++++++ .../.kokoro/samples/python3.13/continuous.cfg | 6 ++ .../samples/python3.13/periodic-head.cfg | 11 ++++ .../.kokoro/samples/python3.13/periodic.cfg | 6 ++ .../.kokoro/samples/python3.13/presubmit.cfg | 6 ++ .../.kokoro/test-samples-impl.sh | 3 +- .../google-cloud-datastore/CONTRIBUTING.rst | 6 +- packages/google-cloud-datastore/noxfile.py | 18 ++++-- .../samples/snippets/noxfile.py | 2 +- .../snippets/schedule-export/noxfile.py | 2 +- .../testing/constraints-3.13.txt | 0 17 files changed, 124 insertions(+), 42 deletions(-) create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg create mode 100644 
packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg create mode 100644 packages/google-cloud-datastore/testing/constraints-3.13.txt diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 597e0c3261ca..6301519a9a05 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 -# created: 2024-09-16T21:04:09.091105552Z + digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 +# created: 2024-11-12T12:09:45.821174897Z diff --git a/packages/google-cloud-datastore/.github/release-trigger.yml b/packages/google-cloud-datastore/.github/release-trigger.yml index d4ca94189e16..932fbdd05682 100644 --- a/packages/google-cloud-datastore/.github/release-trigger.yml +++ b/packages/google-cloud-datastore/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: python-datastore diff --git a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml index 4b8f66f76573..319f6e4bc1f0 100644 --- a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml @@ -29,6 +29,7 @@ branchProtectionRules: - 'unit (3.10)' - 'unit (3.11)' - 'unit (3.12)' + - 'unit (3.13)' - 'cover' - 'mypy' # List of explicit permissions to add (additive only) diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index dd8bd76922f9..6a0bc07438fa 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt index 7129c7715594..8bb0764594b1 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt @@ -1,42 +1,42 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + 
--hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 # via nox -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via nox -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.27.1 \ + --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ + --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 # via nox diff --git a/packages/google-cloud-datastore/.kokoro/docs/common.cfg b/packages/google-cloud-datastore/.kokoro/docs/common.cfg index 6b7da47ba536..33f2bdc708e9 100644 --- a/packages/google-cloud-datastore/.kokoro/docs/common.cfg +++ b/packages/google-cloud-datastore/.kokoro/docs/common.cfg @@ -63,4 +63,4 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 000000000000..33af919b04ab --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs 
will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-datastore/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000000..714045a75ed7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh index 55910c8ba178..53e365bc4e79 100755 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f 
"${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index 854a6c7c59e5..c59f8503d5f5 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.12 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -252,6 +252,7 @@ We support: - `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ +- `Python 3.13`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ @@ -259,6 +260,7 @@ We support: .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 4a08c70ff100..7fcab22046d6 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -34,7 +34,15 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -66,7 +74,6 @@ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() -# 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", "system", @@ -77,6 +84,7 @@ "blacken", "docs", "doctests", + "docfx", "format", ] @@ -189,7 +197,7 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -407,7 +415,7 @@ def docfx(session): ) -@nox.session(python="3.12") +@nox.session(python="3.13") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -415,7 +423,7 @@ def docfx(session): def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index 483b55901791..a169b5b5b464 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py index 483b55901791..a169b5b5b464 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/testing/constraints-3.13.txt b/packages/google-cloud-datastore/testing/constraints-3.13.txt new file mode 100644 index 000000000000..e69de29bb2d1 From 27766d2e8bd170f9d8feb46d4bc7027796f8b0a6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 12 Dec 2024 12:15:05 -0600 Subject: [PATCH 592/611] fix: preserve list meanings (#575) --- .../google/cloud/datastore/helpers.py | 66 ++++---- .../tests/unit/test_helpers.py | 148 +++++++++++++++++- 2 files changed, 176 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index 6eaa3b89e37d..d491360c00f5 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -18,7 +18,6 @@ """ import datetime -import itertools from google.protobuf import struct_pb2 from google.type import latlng_pb2 @@ -43,36 +42,29 @@ def _get_meaning(value_pb, is_list=False): :param is_list: Boolean indicating if the ``value_pb`` contains a list value. 
- :rtype: int + :rtype: int | Tuple[Optional[int], Optional[list[int | None]]] | None :returns: The meaning for the ``value_pb`` if one is set, else - :data:`None`. For a list value, if there are disagreeing - means it just returns a list of meanings. If all the - list meanings agree, it just condenses them. + :data:`None`. For a list value, returns a tuple of + the root meaning of the list, and a list of meanings + of each sub-value. If subvalues are all empty, returns + :data:`None` instead of a list. """ if is_list: + root_meaning = value_pb.meaning or None values = value_pb.array_value.values - # An empty list will have no values, hence no shared meaning - # set among them. - if len(values) == 0: - return None - # We check among all the meanings, some of which may be None, # the rest which may be enum/int values. - all_meanings = [_get_meaning(sub_value_pb) for sub_value_pb in values] - unique_meanings = set(all_meanings) - - if len(unique_meanings) == 1: - # If there is a unique meaning, we preserve it. - return unique_meanings.pop() - else: # We know len(value_pb.array_value.values) > 0. - # If the meaning is not unique, just return all of them. - return all_meanings - - elif value_pb.meaning: # Simple field (int32). - return value_pb.meaning - - return None + sub_meanings = [sub_value_pb.meaning or None for sub_value_pb in values] + if not any(meaning is not None for meaning in sub_meanings): + sub_meanings = None + if root_meaning is None and sub_meanings is None: + # no meanings to save + return None + else: + return root_meaning, sub_meanings + else: + return value_pb.meaning or None def _new_value_pb(entity_pb, name): @@ -156,6 +148,10 @@ def entity_from_protobuf(pb): def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): """Add meaning information (from an entity) to a protobuf. + value_pb is assumed to have no `meaning` data currently present. + This means if the entity's meaning data is None, this function will do nothing, + rather than removing any existing data. + :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity to be turned into a protobuf. @@ -181,14 +177,20 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): if orig_value is not value: return - # For lists, we set meaning on each sub-element. 
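The branch removed below collapsed list meanings into either a single int or a flat list, which lost any meaning set on the list value itself. Under the new tuple convention, both layers survive a round trip; a short sketch mirroring the unit tests (the meaning numbers are arbitrary illustration values)::

    from google.cloud.datastore_v1.types import entity as entity_pb2
    from google.cloud.datastore.helpers import _get_meaning

    value_pb = entity_pb2.Value()
    value_pb.meaning = 31                # meaning on the list itself
    sub1 = value_pb._pb.array_value.values.add()
    sub1.integer_value = 1
    sub1.meaning = 9                     # meaning on the first element only
    sub2 = value_pb._pb.array_value.values.add()
    sub2.integer_value = 2

    # Root and per-element meanings are now both preserved.
    assert _get_meaning(value_pb, is_list=True) == (31, [9, None])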
- if is_list: - if not isinstance(meaning, list): - meaning = itertools.repeat(meaning) - val_iter = zip(value_pb.array_value.values, meaning) - for sub_value_pb, sub_meaning in val_iter: - if sub_meaning is not None: - sub_value_pb.meaning = sub_meaning + if meaning is None: + # no meaning data to set + return + elif is_list: + # for lists, set meaning on the root pb and on each sub-element + root_meaning, sub_meaning_list = meaning + if root_meaning is not None: + value_pb.meaning = root_meaning + if sub_meaning_list: + for sub_value_pb, sub_meaning in zip( + value_pb.array_value.values, sub_meaning_list + ): + if sub_meaning is not None: + sub_value_pb.meaning = sub_meaning else: value_pb.meaning = meaning diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index 38702dbad21b..a6f63a8078ad 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -361,19 +361,21 @@ def test_entity_to_protobuf_w_variable_meanings(): entity = Entity() name = "quux" entity[name] = values = [1, 20, 300] - meaning = 9 - entity._meanings[name] = ([None, meaning, None], values) + root_meaning = 31 + sub_meaning = 9 + entity._meanings[name] = ((root_meaning, [None, sub_meaning, None]), values) entity_pb = entity_to_protobuf(entity) # Construct the expected protobuf. expected_pb = entity_pb2.Entity() value_pb = _new_value_pb(expected_pb, name) + value_pb.meaning = root_meaning value0 = value_pb.array_value.values.add() value0.integer_value = values[0] # The only array entry with a meaning is the middle one. value1 = value_pb.array_value.values.add() value1.integer_value = values[1] - value1.meaning = meaning + value1.meaning = sub_meaning value2 = value_pb.array_value.values.add() value2.integer_value = values[2] @@ -1179,7 +1181,46 @@ def test__get_meaning_w_array_value(): sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) - assert meaning == result + # should preserve sub-value meanings as list + assert (None, [meaning, meaning]) == result + + +def test__get_meaning_w_array_value_root_meaning(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + meaning = 9 + value_pb.meaning = meaning + sub_value_pb1 = value_pb._pb.array_value.values.add() + sub_value_pb2 = value_pb._pb.array_value.values.add() + + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" + + result = _get_meaning(value_pb, is_list=True) + # should preserve sub-value meanings as list + assert (meaning, None) == result + + +def test__get_meaning_w_array_value_root_and_sub_meanings(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + root_meaning = 9 + sub_meaning = 3 + value_pb.meaning = root_meaning + sub_value_pb1 = value_pb._pb.array_value.values.add() + sub_value_pb2 = value_pb._pb.array_value.values.add() + + sub_value_pb1.meaning = sub_value_pb2.meaning = sub_meaning + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" + + result = _get_meaning(value_pb, is_list=True) + # should preserve sub-value meanings as list + assert (root_meaning, [sub_meaning, sub_meaning]) == result def test__get_meaning_w_array_value_multiple_meanings(): @@ -1198,7 +1239,7 @@ def 
test__get_meaning_w_array_value_multiple_meanings(): sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) - assert result == [meaning1, meaning2] + assert result == (None, [meaning1, meaning2]) def test__get_meaning_w_array_value_meaning_partially_unset(): @@ -1215,7 +1256,102 @@ def test__get_meaning_w_array_value_meaning_partially_unset(): sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) - assert result == [meaning1, None] + assert result == (None, [meaning1, None]) + + +def test__get_meaning_w_array_value_meaning_fully_unset(): + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _get_meaning + + value_pb = entity_pb2.Value() + sub_value_pb1 = value_pb._pb.array_value.values.add() + sub_value_pb2 = value_pb._pb.array_value.values.add() + + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" + + result = _get_meaning(value_pb, is_list=True) + assert result is None + + +@pytest.mark.parametrize("orig_root_meaning", [0, 1]) +@pytest.mark.parametrize("orig_sub_meaning", [0, 1]) +def test__set_pb_meaning_w_array_value_fully_unset(orig_root_meaning, orig_sub_meaning): + """ + call _set_pb_meaning_from_entity with meaning=None data. + Should not touch proto's meaning field + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _set_pb_meaning_from_entity + from google.cloud.datastore.entity import Entity + + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = orig_root_meaning + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb1.meaning = orig_sub_meaning + + entity = Entity(key="key") + entity._meanings = {"value": ((None, None), None)} + _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=True) + assert value_pb.meaning == orig_root_meaning + assert value_pb.array_value.values[0].meaning == orig_sub_meaning + + +@pytest.mark.parametrize("orig_meaning", [0, 1]) +def test__set_pb_meaning_w_value_unset(orig_meaning): + """ + call _set_pb_meaning_from_entity with meaning=None data. 
+ Should not touch proto's meaning field + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _set_pb_meaning_from_entity + from google.cloud.datastore.entity import Entity + + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = orig_meaning + + entity = Entity(key="key") + entity._meanings = {"value": (None, None)} + _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=False) + assert value_pb.meaning == orig_meaning + + +def test__array_w_meaning_end_to_end(): + """ + Test proto->entity->proto with an array with a meaning field + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import entity_from_protobuf + from google.cloud.datastore.helpers import entity_to_protobuf + + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = 31 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb1.double_value = 1 + sub_value_pb1.meaning = 1 + sub_value_pb2 = value_pb.array_value.values.add() + sub_value_pb2.double_value = 2 + sub_value_pb3 = value_pb.array_value.values.add() + sub_value_pb3.double_value = 3 + sub_value_pb3.meaning = 3 + # convert to entity + entity = entity_from_protobuf(orig_pb._pb) + assert entity._meanings["value"][0] == (31, [1, None, 3]) + assert entity._meanings["value"][1] == [1, 2, 3] + # convert back to pb + output_entity_pb = entity_to_protobuf(entity) + final_pb = output_entity_pb._pb.properties["value"] + assert final_pb.meaning == 31 + assert len(final_pb.array_value.values) == 3 + assert final_pb.array_value.values[0].meaning == 1 + assert final_pb.array_value.values[0].double_value == 1 + assert final_pb.array_value.values[1].meaning == 0 + assert final_pb.array_value.values[1].double_value == 2 + assert final_pb.array_value.values[2].meaning == 3 + assert final_pb.array_value.values[2].double_value == 3 def _make_geopoint(*args, **kwargs): From 1a39e9842c31199bf99ae9b55fc85a82afbc8abf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 10:48:37 -0800 Subject: [PATCH 593/611] chore(main): release 2.20.2 (#576) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-datastore/.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 7 +++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../google/cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index a95c589d8ce9..eeb4bcda33c6 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.20.1" + ".": "2.20.2" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 45db587dbe41..549b44f46c6a 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: 
https://pypi.org/project/google-cloud-datastore/#history +## [2.20.2](https://github.com/googleapis/python-datastore/compare/v2.20.1...v2.20.2) (2024-12-12) + + +### Bug Fixes + +* Preserve list meanings ([#575](https://github.com/googleapis/python-datastore/issues/575)) ([266243b](https://github.com/googleapis/python-datastore/commit/266243ba360a9d41ab4b51c323eac44d2cfc35cb)) + ## [2.20.1](https://github.com/googleapis/python-datastore/compare/v2.20.0...v2.20.1) (2024-08-14) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 5feb6ccf27f2..6975b43d3e80 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index 02ee97e2a149..e37230a636b1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.1" +__version__ = "2.20.2" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 5585b0b1a0d8..4c1787c53865 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 9b4d43d65b8d..efd187993dda 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 9b4d43d65b8d..efd187993dda 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} From ab772eae920872fedb3909380c524c1e6ed6a67e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 3 Jan 2025 14:32:48 -0500 Subject: [PATCH 594/611] chore(python): Update the python version in docs presubmit to use 3.10 (#584) Source-Link: https://github.com/googleapis/synthtool/commit/de3def663b75d8b9ae1e5d548364c960ff13af8f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 6 +-- .../.github/workflows/docs.yml | 2 +- .../.github/workflows/unittest.yml | 5 +- .../.kokoro/docker/docs/requirements.txt | 52 +++++++++++++++---- 4 files changed, 49 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 6301519a9a05..1d0fd7e7878b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 -# created: 2024-11-12T12:09:45.821174897Z + digest: sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 +# created: 2025-01-02T23:09:36.975468657Z diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml index 698fbc5c94da..2833fe98fff0 100644 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ b/packages/google-cloud-datastore/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index 6a0bc07438fa..c66b757ced2b 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -5,7 +5,10 @@ on: name: unittest jobs: unit: - runs-on: ubuntu-latest + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. 
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 strategy: matrix: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt index 8bb0764594b1..f99a5c4aac7f 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in # -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 +argcomplete==3.5.2 \ + --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ + --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb # via nox colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ @@ -23,7 +23,7 @@ filelock==3.16.1 \ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in + # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,11 +32,41 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 
\ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via nox From adb9dfdf03731a849e13ebbe62da584241459a2c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 16 Jan 2025 12:07:03 -0800 Subject: [PATCH 595/611] chore: update protoplus for python 3.13 (#579) Fixes https://github.com/googleapis/python-datastore/issues/571 --- packages/google-cloud-datastore/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index eb3c2660faaa..0534aa7c8d8f 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -36,6 +36,7 @@ "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} From 1b0deed389056c8d4ffa1f8b0fae31aef4f8a588 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 11:45:08 -0500 Subject: [PATCH 596/611] chore(python): conditionally load credentials in .kokoro/build.sh (#594) Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../google-cloud-datastore/.kokoro/build.sh | 20 +- .../.kokoro/docker/docs/requirements.in | 1 + 
.../.kokoro/docker/docs/requirements.txt | 243 +++++++++++++++++- .../.kokoro/publish-docs.sh | 4 - packages/google-cloud-datastore/README.rst | 93 ++++++- packages/google-cloud-datastore/renovate.json | 2 +- 7 files changed, 343 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 1d0fd7e7878b..3f7634f25f8e 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 -# created: 2025-01-02T23:09:36.975468657Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh index e0cd71b03bd2..d41b45aa1dd0 100755 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ b/packages/google-cloud-datastore/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-datastore" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
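Guarding both exports in the build script above matters because google-auth's Application Default Credentials probe several sources in order when ``GOOGLE_APPLICATION_CREDENTIALS`` is unset, whereas exporting a path to a file that does not exist fails outright. In Python terms (public google-auth API, shown only as a sketch)::

    import google.auth

    # ADC resolution order: the GOOGLE_APPLICATION_CREDENTIALS file,
    # then gcloud user credentials, then the GCE/GKE metadata server.
    # Leaving the variable unset lets this fallback chain proceed.
    credentials, project_id = google.auth.default()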
if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in index 816817c672a1..586bd07037ae 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt index f99a5c4aac7f..a9360a25b707 100644 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + 
--hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + 
--hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + 
--hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + 
--hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + 
--hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh index 233205d580e9..4ed4aaf1346f 100755 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ b/packages/google-cloud-datastore/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index c5f944dc9b40..dc21a4e83fc0 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -30,12 +30,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Google Cloud Datastore API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ @@ -110,3 +110,92 @@ Next Steps .. _Google Cloud Datastore API Product documentation: https://cloud.google.com/datastore .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. 
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured or the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard :code:`logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+    import logging
+
+    from google.cloud import datastore
+
+    base_logger = logging.getLogger("google")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+    import logging
+
+    from google.cloud import datastore
+
+    base_logger = logging.getLogger("google.cloud.library_v1")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the :code:`google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code (see the sketch after this list).
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
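+
+The sketch below illustrates the propagation point above. It is a minimal example using only the standard :code:`logging` module; the :code:`basicConfig`
+call and its format string are illustrative choices, not requirements of this library.
+
+.. code-block:: python
+
+    import logging
+
+    # Configure the application's root logger once, with any handler you
+    # like. This library emits events at DEBUG level or higher, so the
+    # root logger must capture DEBUG for them to be visible.
+    logging.basicConfig(level=logging.DEBUG, format="%(name)s %(levelname)s: %(message)s")
+
+    # Opt the Google-level logger back into propagation so its events
+    # reach the root handlers configured above.
+    logging.getLogger("google").propagate = True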
diff --git a/packages/google-cloud-datastore/renovate.json b/packages/google-cloud-datastore/renovate.json index 39b2a0ec9296..c7875c469bd5 100644 --- a/packages/google-cloud-datastore/renovate.json +++ b/packages/google-cloud-datastore/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 0ed33f52e2f317278720201a675d387bd68fdb9d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 09:34:37 -0800 Subject: [PATCH 597/611] chore: Update gapic-generator-python to v1.23.2 (#569) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Configure Ruby clients for google-ads-ad_manager PiperOrigin-RevId: 689139590 Source-Link: https://github.com/googleapis/googleapis/commit/296f2ac1aa9abccb7708b639b7839faa1809087f Source-Link: https://github.com/googleapis/googleapis-gen/commit/26927362e0aa1293258fc23fe3ce83c5c21d5fbb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjY5MjczNjJlMGFhMTI5MzI1OGZjMjNmZTNjZTgzYzVjMjFkNWZiYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: remove body selector from http rule PiperOrigin-RevId: 693215877 Source-Link: https://github.com/googleapis/googleapis/commit/bb6b53e326ce2db403d18be7158c265e07948920 Source-Link: https://github.com/googleapis/googleapis-gen/commit/db8b5a93484ad44055b2bacc4c7cf87e970fe0ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGI4YjVhOTM0ODRhZDQ0MDU1YjJiYWNjNGM3Y2Y4N2U5NzBmZTBlZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add support for opt-in debug logging fix: Fix typing issue with gRPC metadata when key ends in -bin chore: Update gapic-generator-python to v1.21.0 PiperOrigin-RevId: 705285820 Source-Link: https://github.com/googleapis/googleapis/commit/f9b8b9150f7fcd600b0acaeef91236b1843f5e49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca1e0a1e472d6e6f5de883a5cb54724f112ce348 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add REST Interceptors which support reading metadata feat: Add support for reading selective GAPIC generation methods from service YAML chore: Update gapic-generator-python to v1.22.0 PiperOrigin-RevId: 724026024 Source-Link: https://github.com/googleapis/googleapis/commit/ad9963857109513e77eed153a66264481789109f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e291c4dd1d670eda19998de76f967e1603a48993 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTI5MWM0ZGQxZDY3MGVkYTE5OTk4ZGU3NmY5NjdlMTYwM2E0ODk5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.23.2 PiperOrigin-RevId: 732281673 Source-Link: 
https://github.com/googleapis/googleapis/commit/2f37e0ad56637325b24f8603284ccb6f05796f9a Source-Link: https://github.com/googleapis/googleapis-gen/commit/016b7538ba5a798f2ae423d4ccd7f82b06cdf6d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE2Yjc1MzhiYTVhNzk4ZjJhZTQyM2Q0Y2NkN2Y4MmIwNmNkZjZkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../services/datastore_admin/async_client.py | 122 ++- .../services/datastore_admin/client.py | 238 +++-- .../services/datastore_admin/pagers.py | 16 +- .../datastore_admin/transports/grpc.py | 114 +- .../transports/grpc_asyncio.py | 111 +- .../datastore_admin/transports/rest.py | 795 ++++++++++++-- .../services/datastore/async_client.py | 158 ++- .../datastore_v1/services/datastore/client.py | 274 +++-- .../services/datastore/transports/grpc.py | 114 +- .../datastore/transports/grpc_asyncio.py | 113 +- .../services/datastore/transports/rest.py | 991 ++++++++++++++++-- .../test_datastore_admin.py | 174 +-- .../unit/gapic/datastore_v1/test_datastore.py | 200 ++-- 13 files changed, 2820 insertions(+), 600 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 0c2572d65b20..245f8fb18d5f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -54,6 +55,15 @@ from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport from .client import DatastoreAdminClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class DatastoreAdminAsyncClient: """Google Cloud Datastore Admin API @@ -311,6 +321,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore.admin_v1.DatastoreAdminAsyncClient`.", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "credentialsType": None, + }, + ) + async def export_entities( self, request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, @@ -321,7 +353,7 @@ async def export_entities( output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such @@ -420,8 +452,10 @@ async def sample_export_entities(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -434,8 +468,9 @@ async def sample_export_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [project_id, labels, entity_filter, output_url_prefix] + flattened_params = [project_id, labels, entity_filter, output_url_prefix] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -506,7 +541,7 @@ async def import_entities( entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Imports entities into Google Cloud Datastore. 
Existing entities with the same key are overwritten. The @@ -597,8 +632,10 @@ async def sample_import_entities(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -619,7 +656,10 @@ async def sample_import_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, input_url, entity_filter]) + flattened_params = [project_id, labels, input_url, entity_filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -685,7 +725,7 @@ async def create_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates the specified index. A newly created index's initial state is ``CREATING``. On completion of the returned @@ -739,8 +779,10 @@ async def sample_create_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -799,7 +841,7 @@ async def delete_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes an existing index. An index can only be deleted if it is in a ``READY`` or ``ERROR`` state. On successful execution of @@ -852,8 +894,10 @@ async def sample_delete_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -915,7 +959,7 @@ async def get_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Gets an index. @@ -951,8 +995,10 @@ async def sample_get_index(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.types.Index: @@ -1001,7 +1047,7 @@ async def list_indexes( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListIndexesAsyncPager: r"""Lists the indexes that match the specified filters. Datastore uses an eventually consistent query to fetch @@ -1041,8 +1087,10 @@ async def sample_list_indexes(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesAsyncPager: @@ -1104,7 +1152,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1115,8 +1163,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1157,7 +1207,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
@@ -1168,8 +1218,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1210,7 +1262,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1226,8 +1278,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1264,7 +1318,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1279,8 +1333,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index ca54ec3c7467..489995bc72e7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -14,6 +14,9 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.datastore_admin_v1.services.datastore_admin import pagers @@ -497,52 +509,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DatastoreAdminClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -648,6 +653,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -713,6 +722,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore.admin_v1.DatastoreAdminClient`.", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "credentialsType": None, + }, + ) + def export_entities( self, request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, @@ -723,7 +755,7 @@ def export_entities( output_url_prefix: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such @@ -822,8 +854,10 @@ def sample_export_entities(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -836,8 +870,9 @@ def sample_export_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any( - [project_id, labels, entity_filter, output_url_prefix] + flattened_params = [project_id, labels, entity_filter, output_url_prefix] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -904,7 +939,7 @@ def import_entities( entity_filter: Optional[datastore_admin.EntityFilter] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Imports entities into Google Cloud Datastore. Existing entities with the same key are overwritten. The @@ -995,8 +1030,10 @@ def sample_import_entities(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1017,7 +1054,10 @@ def sample_import_entities(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, input_url, entity_filter]) + flattened_params = [project_id, labels, input_url, entity_filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1079,7 +1119,7 @@ def create_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates the specified index. A newly created index's initial state is ``CREATING``. On completion of the returned @@ -1133,8 +1173,10 @@ def sample_create_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1191,7 +1233,7 @@ def delete_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes an existing index. An index can only be deleted if it is in a ``READY`` or ``ERROR`` state. On successful execution of @@ -1244,8 +1286,10 @@ def sample_delete_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1305,7 +1349,7 @@ def get_index( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Gets an index. @@ -1341,8 +1385,10 @@ def sample_get_index(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.types.Index: @@ -1389,7 +1435,7 @@ def list_indexes( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListIndexesPager: r"""Lists the indexes that match the specified filters. Datastore uses an eventually consistent query to fetch @@ -1429,8 +1475,10 @@ def sample_list_indexes(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesPager: @@ -1503,7 +1551,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1514,8 +1562,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. 
@@ -1539,16 +1589,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1556,7 +1610,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1567,8 +1621,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1592,16 +1648,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1609,7 +1669,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1625,8 +1685,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1663,7 +1725,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1678,8 +1740,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index dc61026b6192..4c0fa8a50090 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -67,7 +67,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -81,8 +81,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = datastore_admin.ListIndexesRequest(request) @@ -141,7 +143,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -155,8 +157,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = datastore_admin.ListIndexesRequest(request) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 4d08d9c43999..498287974287 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
 #
+import json
+import logging as std_logging
+import pickle
 import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple, Union
 
@@ -22,14 +25,92 @@
 import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
 import grpc  # type: ignore
+import proto  # type: ignore
 
 from google.cloud.datastore_admin_v1.types import datastore_admin
 from google.cloud.datastore_admin_v1.types import index
 from google.longrunning import operations_pb2  # type: ignore
 from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO
 
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.datastore.admin.v1.DatastoreAdmin",
+                    "rpcName": client_call_details.method,
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert gRPC metadata `<class 'grpc.aio.Metadata'>` to list of tuples
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.datastore.admin.v1.DatastoreAdmin",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
 
 class DatastoreAdminGrpcTransport(DatastoreAdminTransport):
     """gRPC backend transport for DatastoreAdmin.
@@ -237,7 +318,12 @@ def __init__(
             ],
         )
 
-        # Wrap messages. This must be done after self._grpc_channel exists
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(
+            self._grpc_channel, self._interceptor
+        )
+
+        # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -301,7 +387,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -334,7 +422,7 @@ def export_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "export_entities" not in self._stubs: - self._stubs["export_entities"] = self.grpc_channel.unary_unary( + self._stubs["export_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=datastore_admin.ExportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -366,7 +454,7 @@ def import_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "import_entities" not in self._stubs: - self._stubs["import_entities"] = self.grpc_channel.unary_unary( + self._stubs["import_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", request_serializer=datastore_admin.ImportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -406,7 +494,7 @@ def create_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_index" not in self._stubs: - self._stubs["create_index"] = self.grpc_channel.unary_unary( + self._stubs["create_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/CreateIndex", request_serializer=datastore_admin.CreateIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -445,7 +533,7 @@ def delete_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self.grpc_channel.unary_unary( + self._stubs["delete_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex", request_serializer=datastore_admin.DeleteIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -469,7 +557,7 @@ def get_index(self) -> Callable[[datastore_admin.GetIndexRequest], index.Index]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_index" not in self._stubs: - self._stubs["get_index"] = self.grpc_channel.unary_unary( + self._stubs["get_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", request_serializer=datastore_admin.GetIndexRequest.serialize, response_deserializer=index.Index.deserialize, @@ -500,7 +588,7 @@ def list_indexes( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + self._stubs["list_indexes"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", request_serializer=datastore_admin.ListIndexesRequest.serialize, response_deserializer=datastore_admin.ListIndexesResponse.deserialize, @@ -508,7 +596,7 @@ def list_indexes( return self._stubs["list_indexes"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -520,7 +608,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -537,7 +625,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -554,7 +642,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -573,7 +661,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 366878dbc241..f895032ddf9a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datastore_admin_v1.types import datastore_admin @@ -34,6 +40,82 @@ from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreAdminGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): """gRPC AsyncIO backend transport for DatastoreAdmin. @@ -284,10 +366,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -310,7 +395,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -346,7 +431,7 @@ def export_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "export_entities" not in self._stubs: - self._stubs["export_entities"] = self.grpc_channel.unary_unary( + self._stubs["export_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", request_serializer=datastore_admin.ExportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -380,7 +465,7 @@ def import_entities( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "import_entities" not in self._stubs: - self._stubs["import_entities"] = self.grpc_channel.unary_unary( + self._stubs["import_entities"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", request_serializer=datastore_admin.ImportEntitiesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -422,7 +507,7 @@ def create_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_index" not in self._stubs: - self._stubs["create_index"] = self.grpc_channel.unary_unary( + self._stubs["create_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/CreateIndex", request_serializer=datastore_admin.CreateIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -463,7 +548,7 @@ def delete_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self.grpc_channel.unary_unary( + self._stubs["delete_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex", request_serializer=datastore_admin.DeleteIndexRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -489,7 +574,7 @@ def get_index( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_index" not in self._stubs: - self._stubs["get_index"] = self.grpc_channel.unary_unary( + self._stubs["get_index"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", request_serializer=datastore_admin.GetIndexRequest.serialize, response_deserializer=index.Index.deserialize, @@ -521,7 +606,7 @@ def list_indexes( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self.grpc_channel.unary_unary( + self._stubs["list_indexes"] = self._logged_channel.unary_unary( "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", request_serializer=datastore_admin.ListIndexesRequest.serialize, response_deserializer=datastore_admin.ListIndexesResponse.deserialize, @@ -609,7 +694,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -625,7 +710,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -642,7 +727,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -659,7 +744,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -678,7 +763,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 01fcdd8597fc..1cdfd4b9059f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -45,11 +46,19 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -125,8 +134,10 @@ def post_list_indexes(self, response): def pre_create_index( self, request: datastore_admin.CreateIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.CreateIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_index Override in a subclass to manipulate the request or metadata @@ -139,17 +150,42 @@ def post_create_index( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_index_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_index` interceptor runs + before the `post_create_index_with_metadata` interceptor. """ return response + def post_create_index_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_create_index_with_metadata` + interceptor in new development instead of the `post_create_index` interceptor. + When both interceptors are used, this `post_create_index_with_metadata` interceptor runs after the + `post_create_index` interceptor. The (possibly modified) response returned by + `post_create_index` will be passed to + `post_create_index_with_metadata`. + """ + return response, metadata + def pre_delete_index( self, request: datastore_admin.DeleteIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.DeleteIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_index Override in a subclass to manipulate the request or metadata @@ -162,17 +198,42 @@ def post_delete_index( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_index_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_index` interceptor runs + before the `post_delete_index_with_metadata` interceptor. """ return response + def post_delete_index_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_index_with_metadata` + interceptor in new development instead of the `post_delete_index` interceptor. + When both interceptors are used, this `post_delete_index_with_metadata` interceptor runs after the + `post_delete_index` interceptor. The (possibly modified) response returned by + `post_delete_index` will be passed to + `post_delete_index_with_metadata`. + """ + return response, metadata + def pre_export_entities( self, request: datastore_admin.ExportEntitiesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.ExportEntitiesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ExportEntitiesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for export_entities Override in a subclass to manipulate the request or metadata @@ -185,17 +246,42 @@ def post_export_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_export_entities` interceptor runs + before the `post_export_entities_with_metadata` interceptor. """ return response + def post_export_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_export_entities_with_metadata` + interceptor in new development instead of the `post_export_entities` interceptor. + When both interceptors are used, this `post_export_entities_with_metadata` interceptor runs after the + `post_export_entities` interceptor. The (possibly modified) response returned by + `post_export_entities` will be passed to + `post_export_entities_with_metadata`. 
+ """ + return response, metadata + def pre_get_index( self, request: datastore_admin.GetIndexRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.GetIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_index Override in a subclass to manipulate the request or metadata @@ -206,17 +292,40 @@ def pre_get_index( def post_get_index(self, response: index.Index) -> index.Index: """Post-rpc interceptor for get_index - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_index_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_index` interceptor runs + before the `post_get_index_with_metadata` interceptor. """ return response + def post_get_index_with_metadata( + self, response: index.Index, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[index.Index, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_index + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_get_index_with_metadata` + interceptor in new development instead of the `post_get_index` interceptor. + When both interceptors are used, this `post_get_index_with_metadata` interceptor runs after the + `post_get_index` interceptor. The (possibly modified) response returned by + `post_get_index` will be passed to + `post_get_index_with_metadata`. + """ + return response, metadata + def pre_import_entities( self, request: datastore_admin.ImportEntitiesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.ImportEntitiesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ImportEntitiesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for import_entities Override in a subclass to manipulate the request or metadata @@ -229,17 +338,42 @@ def post_import_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_import_entities` interceptor runs + before the `post_import_entities_with_metadata` interceptor. """ return response + def post_import_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_import_entities_with_metadata` + interceptor in new development instead of the `post_import_entities` interceptor. 
+ When both interceptors are used, this `post_import_entities_with_metadata` interceptor runs after the + `post_import_entities` interceptor. The (possibly modified) response returned by + `post_import_entities` will be passed to + `post_import_entities_with_metadata`. + """ + return response, metadata + def pre_list_indexes( self, request: datastore_admin.ListIndexesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ListIndexesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_indexes Override in a subclass to manipulate the request or metadata @@ -252,17 +386,44 @@ def post_list_indexes( ) -> datastore_admin.ListIndexesResponse: """Post-rpc interceptor for list_indexes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_indexes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DatastoreAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_indexes` interceptor runs + before the `post_list_indexes_with_metadata` interceptor. """ return response + def post_list_indexes_with_metadata( + self, + response: datastore_admin.ListIndexesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore_admin.ListIndexesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_indexes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatastoreAdmin server but before it is returned to user code. + + We recommend only using this `post_list_indexes_with_metadata` + interceptor in new development instead of the `post_list_indexes` interceptor. + When both interceptors are used, this `post_list_indexes_with_metadata` interceptor runs after the + `post_list_indexes` interceptor. The (possibly modified) response returned by + `post_list_indexes` will be passed to + `post_list_indexes_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -282,8 +443,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -303,8 +466,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -326,8 +491,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -574,7 +741,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the create index method over HTTP. @@ -585,8 +752,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -599,6 +768,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_http_options() ) + request, metadata = self._interceptor.pre_create_index(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseCreateIndex._get_transcoded_request( http_options, request @@ -613,6 +783,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.CreateIndex", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "CreateIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._CreateIndex._get_response( self._host, @@ -632,7 +829,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_index_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.create_index", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "CreateIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteIndex( @@ -669,7 +892,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the delete index method over HTTP. @@ -680,8 +903,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
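The widened annotation that repeats through these docstrings encodes gRPC's metadata rule: values are str, except when the key ends in -bin, in which case the value is raw bytes. A quick illustration of metadata that is valid under the new type:

    from typing import Sequence, Tuple, Union

    Metadata = Sequence[Tuple[str, Union[str, bytes]]]

    metadata: Metadata = [
        ("x-goog-request-params", "project_id=my-project"),  # ordinary str value
        ("x-debug-trace-bin", b"\x0a\x02\x08\x01"),          # "-bin" suffix => bytes value
    ]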
Returns: ~.operations_pb2.Operation: @@ -694,6 +919,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_http_options() ) + request, metadata = self._interceptor.pre_delete_index(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseDeleteIndex._get_transcoded_request( http_options, request @@ -704,6 +930,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.DeleteIndex", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "DeleteIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._DeleteIndex._get_response( self._host, @@ -722,7 +975,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_index_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.delete_index", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "DeleteIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ExportEntities( @@ -760,7 +1039,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the export entities method over HTTP. @@ -771,8 +1050,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -785,6 +1066,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseExportEntities._get_http_options() ) + request, metadata = self._interceptor.pre_export_entities(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseExportEntities._get_transcoded_request( http_options, request @@ -799,6 +1081,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ExportEntities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ExportEntities", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ExportEntities._get_response( self._host, @@ -818,7 +1127,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_entities_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.export_entities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ExportEntities", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetIndex( @@ -855,7 +1190,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> index.Index: r"""Call the get index method over HTTP. @@ -866,8 +1201,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
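Each DEBUG block above serializes the message defensively, assembles an http_request dict, and ships it through the stdlib extra= mechanism, which attaches those fields as attributes on the emitted LogRecord. A minimal sketch of a consumer on the other side (the filter is illustrative; the field names match the blocks above):

    import logging

    class RpcNameFilter(logging.Filter):
        def filter(self, record: logging.LogRecord) -> bool:
            # Keys passed via `extra=` become plain attributes on the record.
            rpc = getattr(record, "rpcName", None)
            if rpc is not None:
                record.msg = f"[{rpc}] {record.msg}"
            return True

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("example")
    logger.addFilter(RpcNameFilter())
    logger.debug(
        "Sending request",
        extra={"serviceName": "example.v1.Widgets", "rpcName": "ListWidgets"},
    )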
Returns: ~.index.Index: @@ -877,6 +1214,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseGetIndex._get_http_options() ) + request, metadata = self._interceptor.pre_get_index(request, metadata) transcoded_request = ( _BaseDatastoreAdminRestTransport._BaseGetIndex._get_transcoded_request( @@ -891,6 +1229,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.GetIndex", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetIndex", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._GetIndex._get_response( self._host, @@ -911,7 +1276,33 @@ def __call__( pb_resp = index.Index.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_index(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_index_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = index.Index.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.get_index", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetIndex", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ImportEntities( @@ -949,7 +1340,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the import entities method over HTTP. @@ -960,8 +1351,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -974,6 +1367,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseImportEntities._get_http_options() ) + request, metadata = self._interceptor.pre_import_entities(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseImportEntities._get_transcoded_request( http_options, request @@ -988,6 +1382,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ImportEntities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ImportEntities", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ImportEntities._get_response( self._host, @@ -1007,7 +1428,33 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_entities_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.import_entities", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ImportEntities", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListIndexes( @@ -1044,7 +1491,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore_admin.ListIndexesResponse: r"""Call the list indexes method over HTTP. @@ -1055,8 +1502,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore_admin.ListIndexesResponse: @@ -1068,6 +1517,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseListIndexes._get_http_options() ) + request, metadata = self._interceptor.pre_list_indexes(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseListIndexes._get_transcoded_request( http_options, request @@ -1078,6 +1528,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ListIndexes", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListIndexes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ListIndexes._get_response( self._host, @@ -1098,7 +1575,35 @@ def __call__( pb_resp = datastore_admin.ListIndexesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_indexes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_indexes_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore_admin.ListIndexesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminClient.list_indexes", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListIndexes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1187,7 +1692,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1197,13 +1702,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
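With ListIndexes the last of the six admin RPCs, the rewritten __call__ bodies above all share one fixed order of operations. Condensed into a hedged outline (the helper names here stand in for the _Base* static helpers and json_format calls used in this file):

    def call_rest_rpc(transport, interceptor, request, metadata):
        # 1. Pre-interceptor may rewrite the request and outgoing metadata.
        request, metadata = interceptor.pre_list_indexes(request, metadata)
        # 2. Transcode to HTTP (method, URI, query params) and send.
        http_response = transport.send_http(request, metadata)   # stand-in for _get_response()
        resp = transport.parse(http_response)                    # stand-in for json_format.Parse()
        # 3. Post-interceptors run legacy-first; the *_with_metadata variant
        #    also sees the response headers, stringified into (key, value) pairs.
        resp = interceptor.post_list_indexes(resp)
        response_metadata = [(k, str(v)) for k, v in http_response.headers.items()]
        resp, _ = interceptor.post_list_indexes_with_metadata(resp, response_metadata)
        return resp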
""" http_options = ( _BaseDatastoreAdminRestTransport._BaseCancelOperation._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) @@ -1216,6 +1724,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.CancelOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._CancelOperation._get_response( self._host, @@ -1271,7 +1806,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -1281,13 +1816,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseDatastoreAdminRestTransport._BaseDeleteOperation._get_http_options() ) + request, metadata = self._interceptor.pre_delete_operation( request, metadata ) @@ -1300,6 +1838,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.DeleteOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._DeleteOperation._get_response( self._host, @@ -1355,7 +1920,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1365,8 +1930,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -1375,6 +1942,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseGetOperation._get_http_options() ) + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseGetOperation._get_transcoded_request( http_options, request @@ -1385,6 +1953,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.GetOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._GetOperation._get_response( self._host, @@ -1404,6 +1999,27 @@ def __call__( resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminAsyncClient.GetOperation", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1444,7 +2060,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1454,8 +2070,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
@@ -1464,6 +2082,7 @@ def __call__( http_options = ( _BaseDatastoreAdminRestTransport._BaseListOperations._get_http_options() ) + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseDatastoreAdminRestTransport._BaseListOperations._get_transcoded_request( http_options, request @@ -1474,6 +2093,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore.admin_v1.DatastoreAdminClient.ListOperations", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreAdminRestTransport._ListOperations._get_response( self._host, @@ -1493,6 +2139,27 @@ def __call__( resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore.admin_v1.DatastoreAdminAsyncClient.ListOperations", + extra={ + "serviceName": "google.datastore.admin.v1.DatastoreAdmin", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index fcef7a8bf8ce..88de0d08e474 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -54,6 +55,15 @@ from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport from .client import DatastoreClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class DatastoreAsyncClient: """Each RPC normalizes the partition IDs of the keys in its @@ -258,6 +268,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore_v1.DatastoreAsyncClient`.", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.datastore.v1.Datastore", + "credentialsType": None, + }, + ) + async def lookup( self, request: Optional[Union[datastore.LookupRequest, dict]] = None, @@ -267,7 +299,7 @@ async def lookup( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -323,8 +355,10 @@ async def sample_lookup(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.LookupResponse: @@ -335,7 +369,10 @@ async def sample_lookup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, read_options, keys]) + flattened_params = [project_id, read_options, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -393,7 +430,7 @@ async def run_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -430,8 +467,10 @@ async def sample_run_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
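The client-creation log above has to describe credentials it cannot assume exist, so it probes them with getattr and a callable default rather than branching. A small self-contained illustration of that idiom (the class is invented for the example):

    class FakeCredentials:
        universe_domain = "googleapis.com"
        # Deliberately no get_cred_info method.

    creds = FakeCredentials()

    # getattr with a callable default keeps the call site branch-free:
    # a missing method simply resolves to `lambda: None` and is invoked.
    universe = getattr(creds, "universe_domain", "")
    cred_info = getattr(creds, "get_cred_info", lambda: None)()

    print(universe)   # googleapis.com
    print(cred_info)  # None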
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunQueryResponse: @@ -484,7 +523,7 @@ async def run_aggregation_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -521,8 +560,10 @@ async def sample_run_aggregation_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunAggregationQueryResponse: @@ -576,7 +617,7 @@ async def begin_transaction( project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. @@ -620,8 +661,10 @@ async def sample_begin_transaction(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.BeginTransactionResponse: @@ -632,7 +675,10 @@ async def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) + flattened_params = [project_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -692,7 +738,7 @@ async def commit( mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting or modifying some entities. 
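The has_flattened_params rewrite that recurs through this client changes the test from truthiness to presence: any([...]) silently ignored explicitly-passed falsy arguments (an empty key list, a b"" transaction id), whereas counting non-None entries does not. A quick demonstration:

    transaction = b""  # explicitly provided, but falsy

    # Old check: a falsy argument looks absent.
    print(any([transaction]))  # False

    # New check: anything that is not None counts as provided.
    flattened_params = [transaction]
    print(len([p for p in flattened_params if p is not None]) > 0)  # True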
@@ -773,8 +819,10 @@ async def sample_commit(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.CommitResponse: @@ -785,7 +833,10 @@ async def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, mode, transaction, mutations]) + flattened_params = [project_id, mode, transaction, mutations] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -847,7 +898,7 @@ async def rollback( transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -900,8 +951,10 @@ async def sample_rollback(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RollbackResponse: @@ -913,7 +966,10 @@ async def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, transaction]) + flattened_params = [project_id, transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -971,7 +1027,7 @@ async def allocate_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -1025,8 +1081,10 @@ async def sample_allocate_ids(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.AllocateIdsResponse: @@ -1037,7 +1095,10 @@ async def sample_allocate_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1097,7 +1158,7 @@ async def reserve_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -1150,8 +1211,10 @@ async def sample_reserve_ids(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.ReserveIdsResponse: @@ -1162,7 +1225,10 @@ async def sample_reserve_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1220,7 +1286,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1231,8 +1297,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. 
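Several hunks above replace the `any([...])` truthiness test for flattened parameters with an explicit count of non-`None` arguments. The distinction matters for falsy-but-present values such as an empty `keys` list, which the old check silently treated as "not passed". A self-contained sketch of the difference:

    keys = []  # the caller explicitly passed an (empty) flattened argument

    # Old check: truthiness -- an empty list looks like "not passed".
    print(any([keys]))  # False

    # New check: anything that is not None counts as passed.
    flattened_params = [keys]
    has_flattened_params = (
        len([param for param in flattened_params if param is not None]) > 0
    )
    print(has_flattened_params)  # True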
@@ -1273,7 +1341,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1284,8 +1352,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1326,7 +1396,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1342,8 +1412,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1380,7 +1452,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1395,8 +1467,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index ea6fba235925..aad9d8202ba2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -14,6 +14,9 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.cloud.datastore_v1.types import aggregation_result from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity @@ -448,52 +460,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = DatastoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DatastoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -599,6 +604,10 @@ def __init__( # Initialize the universe domain validation. 
self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -661,6 +670,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.datastore_v1.DatastoreClient`.", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.datastore.v1.Datastore", + "credentialsType": None, + }, + ) + def lookup( self, request: Optional[Union[datastore.LookupRequest, dict]] = None, @@ -670,7 +702,7 @@ def lookup( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.LookupResponse: r"""Looks up entities by key. @@ -726,8 +758,10 @@ def sample_lookup(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.LookupResponse: @@ -738,7 +772,10 @@ def sample_lookup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, read_options, keys]) + flattened_params = [project_id, read_options, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -795,7 +832,7 @@ def run_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunQueryResponse: r"""Queries for entities. @@ -832,8 +869,10 @@ def sample_run_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunQueryResponse: @@ -884,7 +923,7 @@ def run_aggregation_query( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Runs an aggregation query. @@ -921,8 +960,10 @@ def sample_run_aggregation_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RunAggregationQueryResponse: @@ -974,7 +1015,7 @@ def begin_transaction( project_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. @@ -1018,8 +1059,10 @@ def sample_begin_transaction(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.BeginTransactionResponse: @@ -1030,7 +1073,10 @@ def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) + flattened_params = [project_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1087,7 +1133,7 @@ def commit( mutations: Optional[MutableSequence[datastore.Mutation]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.CommitResponse: r"""Commits a transaction, optionally creating, deleting or modifying some entities. @@ -1168,8 +1214,10 @@ def sample_commit(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.CommitResponse: @@ -1180,7 +1228,10 @@ def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, mode, transaction, mutations]) + flattened_params = [project_id, mode, transaction, mutations] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1241,7 +1292,7 @@ def rollback( transaction: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RollbackResponse: r"""Rolls back a transaction. @@ -1294,8 +1345,10 @@ def sample_rollback(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.RollbackResponse: @@ -1307,7 +1360,10 @@ def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, transaction]) + flattened_params = [project_id, transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1364,7 +1420,7 @@ def allocate_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.AllocateIdsResponse: r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. @@ -1418,8 +1474,10 @@ def sample_allocate_ids(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.datastore_v1.types.AllocateIdsResponse: @@ -1430,7 +1488,10 @@ def sample_allocate_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1487,7 +1548,7 @@ def reserve_ids( keys: Optional[MutableSequence[entity.Key]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.ReserveIdsResponse: r"""Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. @@ -1540,8 +1601,10 @@ def sample_reserve_ids(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.datastore_v1.types.ReserveIdsResponse: @@ -1552,7 +1615,10 @@ def sample_reserve_ids(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) + flattened_params = [project_id, keys] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1620,7 +1686,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1631,8 +1697,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1656,16 +1724,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1673,7 +1745,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1684,8 +1756,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1709,16 +1783,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1726,7 +1804,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1742,8 +1820,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1780,7 +1860,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1795,8 +1875,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 620576e29f89..6306c21903d2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,13 +24,91 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.datastore_v1.types import datastore from google.longrunning import operations_pb2 # type: ignore from .base import DatastoreTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.",
extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class DatastoreGrpcTransport(DatastoreTransport): """gRPC backend transport for Datastore. @@ -187,7 +268,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -259,7 +345,7 @@ def lookup(self) -> Callable[[datastore.LookupRequest], datastore.LookupResponse # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "lookup" not in self._stubs: - self._stubs["lookup"] = self.grpc_channel.unary_unary( + self._stubs["lookup"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Lookup", request_serializer=datastore.LookupRequest.serialize, response_deserializer=datastore.LookupResponse.deserialize, @@ -285,7 +371,7 @@ def run_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_unary( + self._stubs["run_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunQuery", request_serializer=datastore.RunQueryRequest.serialize, response_deserializer=datastore.RunQueryResponse.deserialize, @@ -313,7 +399,7 @@ def run_aggregation_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self.grpc_channel.unary_unary( + self._stubs["run_aggregation_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunAggregationQuery", request_serializer=datastore.RunAggregationQueryRequest.serialize, response_deserializer=datastore.RunAggregationQueryResponse.deserialize, @@ -341,7 +427,7 @@ def begin_transaction( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + self._stubs["begin_transaction"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/BeginTransaction", request_serializer=datastore.BeginTransactionRequest.serialize, response_deserializer=datastore.BeginTransactionResponse.deserialize, @@ -366,7 +452,7 @@ def commit(self) -> Callable[[datastore.CommitRequest], datastore.CommitResponse # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( + self._stubs["commit"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Commit", request_serializer=datastore.CommitRequest.serialize, response_deserializer=datastore.CommitResponse.deserialize, @@ -392,7 +478,7 @@ def rollback( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( + self._stubs["rollback"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Rollback", request_serializer=datastore.RollbackRequest.serialize, response_deserializer=datastore.RollbackResponse.deserialize, @@ -419,7 +505,7 @@ def allocate_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "allocate_ids" not in self._stubs: - self._stubs["allocate_ids"] = self.grpc_channel.unary_unary( + self._stubs["allocate_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/AllocateIds", request_serializer=datastore.AllocateIdsRequest.serialize, response_deserializer=datastore.AllocateIdsResponse.deserialize, @@ -446,7 +532,7 @@ def reserve_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "reserve_ids" not in self._stubs: - self._stubs["reserve_ids"] = self.grpc_channel.unary_unary( + self._stubs["reserve_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/ReserveIds", request_serializer=datastore.ReserveIdsRequest.serialize, response_deserializer=datastore.ReserveIdsResponse.deserialize, @@ -454,7 +540,7 @@ def reserve_ids( return self._stubs["reserve_ids"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( @@ -466,7 +552,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -483,7 +569,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -500,7 +586,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -519,7 +605,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 4d943696f1c7..aba046cb0cb1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. # import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datastore_v1.types import datastore @@ -32,6 +38,82 @@ from .base import DatastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DatastoreGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload 
= MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class DatastoreGrpcAsyncIOTransport(DatastoreTransport): """gRPC AsyncIO backend transport for Datastore. @@ -234,10 +316,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -269,7 +354,7 @@ def lookup( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "lookup" not in self._stubs: - self._stubs["lookup"] = self.grpc_channel.unary_unary( + self._stubs["lookup"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Lookup", request_serializer=datastore.LookupRequest.serialize, response_deserializer=datastore.LookupResponse.deserialize, @@ -295,7 +380,7 @@ def run_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_unary( + self._stubs["run_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunQuery", request_serializer=datastore.RunQueryRequest.serialize, response_deserializer=datastore.RunQueryResponse.deserialize, @@ -324,7 +409,7 @@ def run_aggregation_query( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self.grpc_channel.unary_unary( + self._stubs["run_aggregation_query"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/RunAggregationQuery", request_serializer=datastore.RunAggregationQueryRequest.serialize, response_deserializer=datastore.RunAggregationQueryResponse.deserialize, @@ -353,7 +438,7 @@ def begin_transaction( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( + self._stubs["begin_transaction"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/BeginTransaction", request_serializer=datastore.BeginTransactionRequest.serialize, response_deserializer=datastore.BeginTransactionResponse.deserialize, @@ -380,7 +465,7 @@ def commit( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( + self._stubs["commit"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Commit", request_serializer=datastore.CommitRequest.serialize, response_deserializer=datastore.CommitResponse.deserialize, @@ -406,7 +491,7 @@ def rollback( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( + self._stubs["rollback"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/Rollback", request_serializer=datastore.RollbackRequest.serialize, response_deserializer=datastore.RollbackResponse.deserialize, @@ -435,7 +520,7 @@ def allocate_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "allocate_ids" not in self._stubs: - self._stubs["allocate_ids"] = self.grpc_channel.unary_unary( + self._stubs["allocate_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/AllocateIds", request_serializer=datastore.AllocateIdsRequest.serialize, response_deserializer=datastore.AllocateIdsResponse.deserialize, @@ -464,7 +549,7 @@ def reserve_ids( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "reserve_ids" not in self._stubs: - self._stubs["reserve_ids"] = self.grpc_channel.unary_unary( + self._stubs["reserve_ids"] = self._logged_channel.unary_unary( "/google.datastore.v1.Datastore/ReserveIds", request_serializer=datastore.ReserveIdsRequest.serialize, response_deserializer=datastore.ReserveIdsResponse.deserialize, @@ -582,7 +667,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -598,7 +683,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -615,7 +700,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -632,7 +717,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -651,7 +736,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index abb7d45c02a5..5bd89407bb7c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -43,11 +44,19 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -137,8 +146,10 @@ def post_run_query(self, response): """ def pre_allocate_ids( - self, request: datastore.AllocateIdsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.AllocateIdsRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.AllocateIdsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.AllocateIdsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for allocate_ids Override in a subclass to manipulate the request or metadata @@ -151,17 +162,42 @@ def post_allocate_ids( ) -> datastore.AllocateIdsResponse: """Post-rpc interceptor for allocate_ids - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_allocate_ids_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_allocate_ids` interceptor runs + before the `post_allocate_ids_with_metadata` interceptor. """ return response + def post_allocate_ids_with_metadata( + self, + response: datastore.AllocateIdsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.AllocateIdsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for allocate_ids + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_allocate_ids_with_metadata` + interceptor in new development instead of the `post_allocate_ids` interceptor. 
+ When both interceptors are used, this `post_allocate_ids_with_metadata` interceptor runs after the + `post_allocate_ids` interceptor. The (possibly modified) response returned by + `post_allocate_ids` will be passed to + `post_allocate_ids_with_metadata`. + """ + return response, metadata + def pre_begin_transaction( self, request: datastore.BeginTransactionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for begin_transaction Override in a subclass to manipulate the request or metadata @@ -174,15 +210,42 @@ def post_begin_transaction( ) -> datastore.BeginTransactionResponse: """Post-rpc interceptor for begin_transaction - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_begin_transaction_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_begin_transaction` interceptor runs + before the `post_begin_transaction_with_metadata` interceptor. """ return response + def post_begin_transaction_with_metadata( + self, + response: datastore.BeginTransactionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.BeginTransactionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_begin_transaction_with_metadata` + interceptor in new development instead of the `post_begin_transaction` interceptor. + When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the + `post_begin_transaction` interceptor. The (possibly modified) response returned by + `post_begin_transaction` will be passed to + `post_begin_transaction_with_metadata`. + """ + return response, metadata + def pre_commit( - self, request: datastore.CommitRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.CommitRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.CommitRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for commit Override in a subclass to manipulate the request or metadata @@ -195,15 +258,40 @@ def post_commit( ) -> datastore.CommitResponse: """Post-rpc interceptor for commit - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_commit_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_commit` interceptor runs + before the `post_commit_with_metadata` interceptor. 
""" return response + def post_commit_with_metadata( + self, + response: datastore.CommitResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_commit_with_metadata` + interceptor in new development instead of the `post_commit` interceptor. + When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the + `post_commit` interceptor. The (possibly modified) response returned by + `post_commit` will be passed to + `post_commit_with_metadata`. + """ + return response, metadata + def pre_lookup( - self, request: datastore.LookupRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.LookupRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.LookupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.LookupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for lookup Override in a subclass to manipulate the request or metadata @@ -216,15 +304,40 @@ def post_lookup( ) -> datastore.LookupResponse: """Post-rpc interceptor for lookup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_lookup` interceptor runs + before the `post_lookup_with_metadata` interceptor. """ return response + def post_lookup_with_metadata( + self, + response: datastore.LookupResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.LookupResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lookup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_lookup_with_metadata` + interceptor in new development instead of the `post_lookup` interceptor. + When both interceptors are used, this `post_lookup_with_metadata` interceptor runs after the + `post_lookup` interceptor. The (possibly modified) response returned by + `post_lookup` will be passed to + `post_lookup_with_metadata`. + """ + return response, metadata + def pre_reserve_ids( - self, request: datastore.ReserveIdsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.ReserveIdsRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.ReserveIdsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.ReserveIdsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reserve_ids Override in a subclass to manipulate the request or metadata @@ -237,15 +350,40 @@ def post_reserve_ids( ) -> datastore.ReserveIdsResponse: """Post-rpc interceptor for reserve_ids - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reserve_ids_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. 
This `post_reserve_ids` interceptor runs + before the `post_reserve_ids_with_metadata` interceptor. """ return response + def post_reserve_ids_with_metadata( + self, + response: datastore.ReserveIdsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.ReserveIdsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reserve_ids + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_reserve_ids_with_metadata` + interceptor in new development instead of the `post_reserve_ids` interceptor. + When both interceptors are used, this `post_reserve_ids_with_metadata` interceptor runs after the + `post_reserve_ids` interceptor. The (possibly modified) response returned by + `post_reserve_ids` will be passed to + `post_reserve_ids_with_metadata`. + """ + return response, metadata + def pre_rollback( - self, request: datastore.RollbackRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.RollbackRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.RollbackRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for rollback Override in a subclass to manipulate the request or metadata @@ -258,17 +396,42 @@ def post_rollback( ) -> datastore.RollbackResponse: """Post-rpc interceptor for rollback - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_rollback` interceptor runs + before the `post_rollback_with_metadata` interceptor. """ return response + def post_rollback_with_metadata( + self, + response: datastore.RollbackResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RollbackResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_rollback_with_metadata` + interceptor in new development instead of the `post_rollback` interceptor. + When both interceptors are used, this `post_rollback_with_metadata` interceptor runs after the + `post_rollback` interceptor. The (possibly modified) response returned by + `post_rollback` will be passed to + `post_rollback_with_metadata`. + """ + return response, metadata + def pre_run_aggregation_query( self, request: datastore.RunAggregationQueryRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[datastore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.RunAggregationQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for run_aggregation_query Override in a subclass to manipulate the request or metadata @@ -281,15 +444,42 @@ def post_run_aggregation_query( ) -> datastore.RunAggregationQueryResponse: """Post-rpc interceptor for run_aggregation_query - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_run_aggregation_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_run_aggregation_query` interceptor runs + before the `post_run_aggregation_query_with_metadata` interceptor. """ return response + def post_run_aggregation_query_with_metadata( + self, + response: datastore.RunAggregationQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastore.RunAggregationQueryResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_aggregation_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_run_aggregation_query_with_metadata` + interceptor in new development instead of the `post_run_aggregation_query` interceptor. + When both interceptors are used, this `post_run_aggregation_query_with_metadata` interceptor runs after the + `post_run_aggregation_query` interceptor. The (possibly modified) response returned by + `post_run_aggregation_query` will be passed to + `post_run_aggregation_query_with_metadata`. + """ + return response, metadata + def pre_run_query( - self, request: datastore.RunQueryRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[datastore.RunQueryRequest, Sequence[Tuple[str, str]]]: + self, + request: datastore.RunQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RunQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for run_query Override in a subclass to manipulate the request or metadata @@ -302,17 +492,42 @@ def post_run_query( ) -> datastore.RunQueryResponse: """Post-rpc interceptor for run_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastore server but before - it is returned to user code. + it is returned to user code. This `post_run_query` interceptor runs + before the `post_run_query_with_metadata` interceptor. """ return response + def post_run_query_with_metadata( + self, + response: datastore.RunQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastore.RunQueryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastore server but before it is returned to user code. + + We recommend only using this `post_run_query_with_metadata` + interceptor in new development instead of the `post_run_query` interceptor. + When both interceptors are used, this `post_run_query_with_metadata` interceptor runs after the + `post_run_query` interceptor. The (possibly modified) response returned by + `post_run_query` will be passed to + `post_run_query_with_metadata`. 
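The ordering contract spelled out in these docstrings is worth illustrating: each legacy `post_*` hook fires first, and whatever it returns is what the matching `post_*_with_metadata` hook receives. Below is a minimal sketch of a subclass that uses only the new hook; it assumes the interceptor class in this file is named `DatastoreRestInterceptor` (mirroring the `DatastoreAdminRestInterceptor` naming visible in the tests further down), and the header key is illustrative:

    from google.cloud.datastore_v1.services.datastore.transports.rest import (
        DatastoreRestInterceptor,
    )

    class TracingInterceptor(DatastoreRestInterceptor):
        """Hypothetical interceptor that inspects response metadata."""

        def post_run_query_with_metadata(self, response, metadata):
            # `metadata` is a Sequence[Tuple[str, Union[str, bytes]]]
            # built from the HTTP response headers.
            for key, value in metadata:
                if key == "x-example-trace":  # illustrative key name
                    print(f"trace: {value}")
            # Return the pair unchanged to keep default behavior.
            return response, metadata

Such a subclass is typically handed to the REST transport at construction time; per the docstrings above, if `post_run_query` is also overridden, its (possibly modified) response is what arrives here.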
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -332,8 +547,10 @@ def post_cancel_operation(self, response: None) -> None: def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -353,8 +570,10 @@ def post_delete_operation(self, response: None) -> None: def pre_get_operation( self, request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -376,8 +595,10 @@ def post_get_operation( def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -522,7 +743,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.AllocateIdsResponse: r"""Call the allocate ids method over HTTP. @@ -533,8 +754,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.AllocateIdsResponse: @@ -546,6 +769,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseAllocateIds._get_http_options() ) + request, metadata = self._interceptor.pre_allocate_ids(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseAllocateIds._get_transcoded_request( @@ -564,6 +788,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.AllocateIds", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "AllocateIds", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._AllocateIds._get_response( self._host, @@ -585,7 +836,33 @@ def __call__( pb_resp = datastore.AllocateIdsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_allocate_ids(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_allocate_ids_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.AllocateIdsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.allocate_ids", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "AllocateIds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _BeginTransaction( @@ -623,7 +900,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.BeginTransactionResponse: r"""Call the begin transaction method over HTTP. @@ -634,8 +911,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
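The widened `Union[str, bytes]` annotation follows the gRPC metadata convention that this REST surface mirrors: values are strings, except for keys carrying the `-bin` suffix, whose values are raw bytes. For example (both key names are illustrative):

    from typing import Sequence, Tuple, Union

    metadata: Sequence[Tuple[str, Union[str, bytes]]] = [
        ("x-example-routing", "project_id=my-project"),  # plain str value
        ("x-example-trace-bin", b"\x0a\x04\x08\x01"),    # bytes for a -bin key
    ]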
Returns: ~.datastore.BeginTransactionResponse: @@ -647,6 +926,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseBeginTransaction._get_http_options() ) + request, metadata = self._interceptor.pre_begin_transaction( request, metadata ) @@ -663,6 +943,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.BeginTransaction", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "BeginTransaction", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._BeginTransaction._get_response( self._host, @@ -684,7 +991,35 @@ def __call__( pb_resp = datastore.BeginTransactionResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_begin_transaction(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_begin_transaction_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.BeginTransactionResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.begin_transaction", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "BeginTransaction", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Commit(_BaseDatastoreRestTransport._BaseCommit, DatastoreRestStub): @@ -720,7 +1055,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.CommitResponse: r"""Call the commit method over HTTP. @@ -731,8 +1066,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.CommitResponse: @@ -742,6 +1079,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseCommit._get_http_options() + request, metadata = self._interceptor.pre_commit(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseCommit._get_transcoded_request( @@ -760,6 +1098,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.Commit", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Commit", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._Commit._get_response( self._host, @@ -781,7 +1146,33 @@ def __call__( pb_resp = datastore.CommitResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.CommitResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.commit", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Commit", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Lookup(_BaseDatastoreRestTransport._BaseLookup, DatastoreRestStub): @@ -817,7 +1208,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.LookupResponse: r"""Call the lookup method over HTTP. @@ -828,8 +1219,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
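The logging added throughout this file is gated twice: `CLIENT_LOGGING_SUPPORTED` (whether `google.api_core.client_logging` is importable) and `_LOGGER.isEnabledFor(logging.DEBUG)`. Because `_LOGGER` is created with `logging.getLogger(__name__)`, the logger name is this module's dotted path, so a sketch of turning the output on with the standard library would be:

    import logging

    logging.basicConfig(level=logging.INFO)
    logging.getLogger(
        "google.cloud.datastore_v1.services.datastore.transports.rest"
    ).setLevel(logging.DEBUG)
    # RPCs now emit the "Sending request ..." / "Received response ..."
    # records with the structured `extra` fields shown in this patch.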
Returns: ~.datastore.LookupResponse: @@ -839,6 +1232,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseLookup._get_http_options() + request, metadata = self._interceptor.pre_lookup(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseLookup._get_transcoded_request( @@ -857,6 +1251,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.Lookup", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Lookup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._Lookup._get_response( self._host, @@ -878,7 +1299,33 @@ def __call__( pb_resp = datastore.LookupResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.LookupResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.lookup", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Lookup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ReserveIds(_BaseDatastoreRestTransport._BaseReserveIds, DatastoreRestStub): @@ -914,7 +1361,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.ReserveIdsResponse: r"""Call the reserve ids method over HTTP. @@ -925,8 +1372,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.ReserveIdsResponse: @@ -938,6 +1387,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseReserveIds._get_http_options() ) + request, metadata = self._interceptor.pre_reserve_ids(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseReserveIds._get_transcoded_request( @@ -956,6 +1406,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.ReserveIds", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ReserveIds", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._ReserveIds._get_response( self._host, @@ -977,7 +1454,33 @@ def __call__( pb_resp = datastore.ReserveIdsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reserve_ids(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reserve_ids_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.ReserveIdsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.reserve_ids", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ReserveIds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Rollback(_BaseDatastoreRestTransport._BaseRollback, DatastoreRestStub): @@ -1013,7 +1516,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RollbackResponse: r"""Call the rollback method over HTTP. @@ -1024,8 +1527,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
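On this REST transport, the metadata handed to the `post_*_with_metadata` hooks is synthesized from the HTTP response headers, with every value coerced to `str`; the recurring pattern reduces to:

    # `headers` stands in for a requests.Response.headers mapping.
    headers = {"content-type": "application/json", "x-example-id": "abc123"}
    response_metadata = [(k, str(v)) for k, v in headers.items()]
    # [("content-type", "application/json"), ("x-example-id", "abc123")]

so in practice the `bytes` branch of the annotation goes unused on the response side here.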
Returns: ~.datastore.RollbackResponse: @@ -1036,6 +1541,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseRollback._get_http_options() + request, metadata = self._interceptor.pre_rollback(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseRollback._get_transcoded_request( @@ -1054,6 +1560,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.Rollback", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Rollback", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._Rollback._get_response( self._host, @@ -1075,7 +1608,33 @@ def __call__( pb_resp = datastore.RollbackResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.RollbackResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.rollback", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "Rollback", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RunAggregationQuery( @@ -1113,7 +1672,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunAggregationQueryResponse: r"""Call the run aggregation query method over HTTP. @@ -1124,8 +1683,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.datastore.RunAggregationQueryResponse: @@ -1137,6 +1698,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseRunAggregationQuery._get_http_options() ) + request, metadata = self._interceptor.pre_run_aggregation_query( request, metadata ) @@ -1153,6 +1715,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.RunAggregationQuery", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunAggregationQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._RunAggregationQuery._get_response( self._host, @@ -1174,7 +1763,35 @@ def __call__( pb_resp = datastore.RunAggregationQueryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_aggregation_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_aggregation_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.RunAggregationQueryResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.run_aggregation_query", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunAggregationQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RunQuery(_BaseDatastoreRestTransport._BaseRunQuery, DatastoreRestStub): @@ -1210,7 +1827,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> datastore.RunQueryResponse: r"""Call the run query method over HTTP. @@ -1221,8 +1838,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
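Payload serialization for these log records happens inside a bare `try`/`except` so that a serialization failure can never break the RPC itself; the payload simply logs as null. The guard amounts to the following standalone restatement (not code from this file):

    def safe_payload(message, to_json):
        """Best-effort JSON for logging; swallows serialization errors."""
        try:
            return to_json(message)
        except Exception:  # the generated code uses a bare `except:`
            return None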
Returns: ~.datastore.RunQueryResponse: @@ -1232,6 +1851,7 @@ def __call__( """ http_options = _BaseDatastoreRestTransport._BaseRunQuery._get_http_options() + request, metadata = self._interceptor.pre_run_query(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseRunQuery._get_transcoded_request( @@ -1250,6 +1870,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.RunQuery", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._RunQuery._get_response( self._host, @@ -1271,7 +1918,33 @@ def __call__( pb_resp = datastore.RunQueryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datastore.RunQueryResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreClient.run_query", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "RunQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1376,7 +2049,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1386,13 +2059,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseDatastoreRestTransport._BaseCancelOperation._get_http_options() ) + request, metadata = self._interceptor.pre_cancel_operation( request, metadata ) @@ -1407,6 +2083,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.CancelOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._CancelOperation._get_response( self._host, @@ -1462,7 +2165,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Call the delete operation method over HTTP. @@ -1472,13 +2175,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseDatastoreRestTransport._BaseDeleteOperation._get_http_options() ) + request, metadata = self._interceptor.pre_delete_operation( request, metadata ) @@ -1493,6 +2199,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.DeleteOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._DeleteOperation._get_response( self._host, @@ -1548,7 +2281,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1558,8 +2291,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -1568,6 +2303,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseGetOperation._get_http_options() ) + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseGetOperation._get_transcoded_request( @@ -1582,6 +2318,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.GetOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._GetOperation._get_response( self._host, @@ -1601,6 +2364,27 @@ def __call__( resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreAsyncClient.GetOperation", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1641,7 +2425,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1651,8 +2435,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
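Note the asymmetry in how payloads are serialized for logging: the Datastore RPCs above call the proto-plus `to_json` classmethod on the message type, while these operations mixins call `json_format.MessageToJson`, because `operations_pb2` messages are plain protobuf rather than proto-plus wrappers. For instance:

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    op = operations_pb2.GetOperationRequest(name="operations/sample-op")
    print(json_format.MessageToJson(op))  # {"name": "operations/sample-op"}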
@@ -1661,6 +2447,7 @@ def __call__( http_options = ( _BaseDatastoreRestTransport._BaseListOperations._get_http_options() ) + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = ( _BaseDatastoreRestTransport._BaseListOperations._get_transcoded_request( @@ -1675,6 +2462,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.datastore_v1.DatastoreClient.ListOperations", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = DatastoreRestTransport._ListOperations._get_response( self._host, @@ -1694,6 +2508,27 @@ def __call__( resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.datastore_v1.DatastoreAsyncClient.ListOperations", + extra={ + "serviceName": "google.datastore.v1.Datastore", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index f880423cbf60..f05f7e4c405c 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -73,6 +73,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -331,83 +339,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True 
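These parametrized cases pin down the contract of `_add_cred_info_for_auth_errors`: credential info is appended to `error.details` only for auth-flavored status codes (401, 403, 404) and only when the credentials object exposes `get_cred_info` returning something non-None. Restated as standalone logic (an illustration, not the client's actual implementation):

    import json

    AUTH_CODES = {401, 403, 404}

    def add_cred_info(error_code, details, cred_info_json):
        """Mirrors the behavior the tests assert."""
        if error_code in AUTH_CODES and cred_info_json is not None:
            details.append(json.dumps(cred_info_json))
        return details

    # add_cred_info(401, ["foo"], {"principal": "sa@example.com"})
    # -> ["foo", '{"principal": "sa@example.com"}']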
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatastoreAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatastoreAdminClient(credentials=cred) + client._transport._credentials = cred - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- # google-api-core becomes 2.15.0 or higher.
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -3171,6 +3142,7 @@ def test_export_entities_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_entities(request) @@ -3225,6 +3197,7 @@ def test_export_entities_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_entities(**mock_args) @@ -3363,6 +3336,7 @@ def test_import_entities_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_entities(request) @@ -3417,6 +3391,7 @@ def test_import_entities_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_entities(**mock_args) @@ -4094,6 +4069,7 @@ def test_export_entities_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_entities(request) @@ -4124,6 +4100,7 @@ def test_export_entities_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_entities(request) # Establish that the response is the type that we expect. 
@@ -4149,10 +4126,13 @@ def test_export_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_export_entities" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_export_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_export_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.ExportEntitiesRequest.pb( datastore_admin.ExportEntitiesRequest() ) @@ -4165,6 +4145,7 @@ def test_export_entities_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4175,6 +4156,7 @@ def test_export_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_entities( request, @@ -4186,6 +4168,7 @@ def test_export_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_entities_rest_bad_request( @@ -4209,6 +4192,7 @@ def test_import_entities_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_entities(request) @@ -4239,6 +4223,7 @@ def test_import_entities_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_entities(request) # Establish that the response is the type that we expect. 
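Two mechanical changes repeat through the remaining tests. First, every mocked HTTP response now stubs `.headers`, because the transport iterates `response.headers.items()` to build the metadata passed to the `*_with_metadata` hooks; a bare `mock.Mock()` would fail there, since iterating the Mock returned by `.headers.items()` raises `TypeError`:

    from unittest import mock

    response = mock.Mock()
    response.status_code = 200
    # Required now: the transport builds response metadata from .headers.
    response.headers = {"header-1": "value-1", "header-2": "value-2"}

Second, each interceptor test patches the new hook alongside the old one, has it return a `(response, metadata)` pair, and asserts both fire exactly once (`post.assert_called_once()` and `post_with_metadata.assert_called_once()`).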
@@ -4264,10 +4249,13 @@ def test_import_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_import_entities" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_import_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_import_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.ImportEntitiesRequest.pb( datastore_admin.ImportEntitiesRequest() ) @@ -4280,6 +4268,7 @@ def test_import_entities_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4290,6 +4279,7 @@ def test_import_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_entities( request, @@ -4301,6 +4291,7 @@ def test_import_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_index_rest_bad_request(request_type=datastore_admin.CreateIndexRequest): @@ -4322,6 +4313,7 @@ def test_create_index_rest_bad_request(request_type=datastore_admin.CreateIndexR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_index(request) @@ -4427,6 +4419,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_index(request) # Establish that the response is the type that we expect. 
@@ -4452,10 +4445,13 @@ def test_create_index_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_create_index" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_create_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_create_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.CreateIndexRequest.pb( datastore_admin.CreateIndexRequest() ) @@ -4468,6 +4464,7 @@ def test_create_index_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4478,6 +4475,7 @@ def test_create_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_index( request, @@ -4489,6 +4487,7 @@ def test_create_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_index_rest_bad_request(request_type=datastore_admin.DeleteIndexRequest): @@ -4510,6 +4509,7 @@ def test_delete_index_rest_bad_request(request_type=datastore_admin.DeleteIndexR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_index(request) @@ -4540,6 +4540,7 @@ def test_delete_index_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_index(request) # Establish that the response is the type that we expect. 
@@ -4565,10 +4566,13 @@ def test_delete_index_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_delete_index" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_delete_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_delete_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.DeleteIndexRequest.pb( datastore_admin.DeleteIndexRequest() ) @@ -4581,6 +4585,7 @@ def test_delete_index_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -4591,6 +4596,7 @@ def test_delete_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_index( request, @@ -4602,6 +4608,7 @@ def test_delete_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_index_rest_bad_request(request_type=datastore_admin.GetIndexRequest): @@ -4623,6 +4630,7 @@ def test_get_index_rest_bad_request(request_type=datastore_admin.GetIndexRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_index(request) @@ -4662,6 +4670,7 @@ def test_get_index_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_index(request) # Establish that the response is the type that we expect. 
@@ -4690,10 +4699,13 @@ def test_get_index_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_get_index" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_get_index_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_get_index" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.GetIndexRequest.pb( datastore_admin.GetIndexRequest() ) @@ -4706,6 +4718,7 @@ def test_get_index_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = index.Index.to_json(index.Index()) req.return_value.content = return_value @@ -4716,6 +4729,7 @@ def test_get_index_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = index.Index() + post_with_metadata.return_value = index.Index(), metadata client.get_index( request, @@ -4727,6 +4741,7 @@ def test_get_index_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_indexes_rest_bad_request(request_type=datastore_admin.ListIndexesRequest): @@ -4748,6 +4763,7 @@ def test_list_indexes_rest_bad_request(request_type=datastore_admin.ListIndexesR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_indexes(request) @@ -4783,6 +4799,7 @@ def test_list_indexes_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_indexes(request) # Establish that the response is the type that we expect. 
@@ -4807,10 +4824,13 @@ def test_list_indexes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreAdminRestInterceptor, "post_list_indexes" ) as post, mock.patch.object( + transports.DatastoreAdminRestInterceptor, "post_list_indexes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreAdminRestInterceptor, "pre_list_indexes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore_admin.ListIndexesRequest.pb( datastore_admin.ListIndexesRequest() ) @@ -4823,6 +4843,7 @@ def test_list_indexes_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore_admin.ListIndexesResponse.to_json( datastore_admin.ListIndexesResponse() ) @@ -4835,6 +4856,10 @@ def test_list_indexes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore_admin.ListIndexesResponse() + post_with_metadata.return_value = ( + datastore_admin.ListIndexesResponse(), + metadata, + ) client.list_indexes( request, @@ -4846,6 +4871,7 @@ def test_list_indexes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( @@ -4871,6 +4897,7 @@ def test_cancel_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -4901,6 +4928,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -4931,6 +4959,7 @@ def test_delete_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -4961,6 +4990,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -4991,6 +5021,7 @@ def test_get_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -5021,6 +5052,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -5049,6 +5081,7 @@ def test_list_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -5079,6 +5112,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers 
= {"header-1": "value-1", "header-2": "value-2"} response = client.list_operations(request) diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 8a28ba749b81..7f0755a88868 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -70,6 +70,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -303,83 +311,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), - (DatastoreClient, transports.DatastoreRestTransport, "rest"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatastoreClient(credentials=cred) + client._transport._credentials = cred - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -3357,6 +3328,7 @@ def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.lookup(request) @@ -3418,6 +3390,7 @@ def test_lookup_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.lookup(**mock_args) @@ -3554,6 +3527,7 @@ def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_query(request) @@ -3679,6 +3653,7 @@ def test_run_aggregation_query_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_aggregation_query(request) @@ -3801,6 +3776,7 @@ def test_begin_transaction_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.begin_transaction(request) @@ -3846,6 +3822,7 @@ def test_begin_transaction_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.begin_transaction(**mock_args) @@ -3975,6 +3952,7 @@ def test_commit_rest_required_fields(request_type=datastore.CommitRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit(request) @@ -4032,6 +4010,7 @@ def test_commit_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit(**mock_args) @@ -4177,6 +4156,7 @@ def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback(request) @@ -4231,6 +4211,7 @@ def test_rollback_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback(**mock_args) @@ -4360,6 +4341,7 @@ def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsReq response_value._content = json_return_value.encode("UTF-8") 
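Stepping back: the hunk above replaces the old `test__validate_universe_domain` matrix with tests for `_add_cred_info_for_auth_errors`, which appends serialized credential info to the details of auth-related errors. A simplified restatement of the behavior the new tests pin down; the helper below is a hypothetical stand-in, and only the observable behavior comes from the diff:

    import json

    # Per the parametrization, 401/403/404 get credential info attached, while
    # 500 and credentials lacking get_cred_info() leave error.details untouched.
    AUTH_ERROR_CODES = (401, 403, 404)

    def add_cred_info_for_auth_errors(error, credentials):
        """Hypothetical stand-in mirroring the asserted behavior."""
        if error.code not in AUTH_ERROR_CODES:
            return
        get_cred_info = getattr(credentials, "get_cred_info", None)
        if get_cred_info is None:
            return
        cred_info = get_cred_info()
        if cred_info:
            error.details.append(json.dumps(cred_info))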
req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.allocate_ids(request) @@ -4418,6 +4400,7 @@ def test_allocate_ids_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.allocate_ids(**mock_args) @@ -4551,6 +4534,7 @@ def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reserve_ids(request) @@ -4609,6 +4593,7 @@ def test_reserve_ids_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reserve_ids(**mock_args) @@ -6041,6 +6026,7 @@ def test_lookup_rest_bad_request(request_type=datastore.LookupRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.lookup(request) @@ -6076,6 +6062,7 @@ def test_lookup_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.lookup(request) # Establish that the response is the type that we expect. 
@@ -6098,10 +6085,13 @@ def test_lookup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_lookup" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_lookup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_lookup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.LookupRequest.pb(datastore.LookupRequest()) transcode.return_value = { "method": "post", @@ -6112,6 +6102,7 @@ def test_lookup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.LookupResponse.to_json(datastore.LookupResponse()) req.return_value.content = return_value @@ -6122,6 +6113,7 @@ def test_lookup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.LookupResponse() + post_with_metadata.return_value = datastore.LookupResponse(), metadata client.lookup( request, @@ -6133,6 +6125,7 @@ def test_lookup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_query_rest_bad_request(request_type=datastore.RunQueryRequest): @@ -6154,6 +6147,7 @@ def test_run_query_rest_bad_request(request_type=datastore.RunQueryRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.run_query(request) @@ -6189,6 +6183,7 @@ def test_run_query_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_query(request) # Establish that the response is the type that we expect. 
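For completeness, a usage sketch of wiring an interceptor that implements the new hook into the non-admin client. The `interceptor=` and `credentials=` constructor arguments follow the generated transports' usual conventions; treat the exact signature as an assumption for your generator version:

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.datastore_v1.services.datastore import DatastoreClient
    from google.cloud.datastore_v1.services.datastore.transports.rest import (
        DatastoreRestInterceptor,
        DatastoreRestTransport,
    )

    class EchoInterceptor(DatastoreRestInterceptor):
        def post_lookup_with_metadata(self, response, metadata):
            # Pass both values through unchanged; a real hook could rewrite
            # either one before the client returns them.
            return response, metadata

    transport = DatastoreRestTransport(
        credentials=AnonymousCredentials(), interceptor=EchoInterceptor()
    )
    client = DatastoreClient(transport=transport)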
@@ -6211,10 +6206,13 @@ def test_run_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_run_query" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_run_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest()) transcode.return_value = { "method": "post", @@ -6225,6 +6223,7 @@ def test_run_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.RunQueryResponse.to_json(datastore.RunQueryResponse()) req.return_value.content = return_value @@ -6235,6 +6234,7 @@ def test_run_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.RunQueryResponse() + post_with_metadata.return_value = datastore.RunQueryResponse(), metadata client.run_query( request, @@ -6246,6 +6246,7 @@ def test_run_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_aggregation_query_rest_bad_request( @@ -6269,6 +6270,7 @@ def test_run_aggregation_query_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.run_aggregation_query(request) @@ -6304,6 +6306,7 @@ def test_run_aggregation_query_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.run_aggregation_query(request) # Establish that the response is the type that we expect. 
@@ -6326,10 +6329,13 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_run_aggregation_query" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_run_aggregation_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_run_aggregation_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.RunAggregationQueryRequest.pb( datastore.RunAggregationQueryRequest() ) @@ -6342,6 +6348,7 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.RunAggregationQueryResponse.to_json( datastore.RunAggregationQueryResponse() ) @@ -6354,6 +6361,10 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.RunAggregationQueryResponse() + post_with_metadata.return_value = ( + datastore.RunAggregationQueryResponse(), + metadata, + ) client.run_aggregation_query( request, @@ -6365,6 +6376,7 @@ def test_run_aggregation_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_begin_transaction_rest_bad_request( @@ -6388,6 +6400,7 @@ def test_begin_transaction_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.begin_transaction(request) @@ -6423,6 +6436,7 @@ def test_begin_transaction_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.begin_transaction(request) # Establish that the response is the type that we expect. 
@@ -6445,10 +6459,13 @@ def test_begin_transaction_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_begin_transaction" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_begin_transaction_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_begin_transaction" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.BeginTransactionRequest.pb( datastore.BeginTransactionRequest() ) @@ -6461,6 +6478,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.BeginTransactionResponse.to_json( datastore.BeginTransactionResponse() ) @@ -6473,6 +6491,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.BeginTransactionResponse() + post_with_metadata.return_value = datastore.BeginTransactionResponse(), metadata client.begin_transaction( request, @@ -6484,6 +6503,7 @@ def test_begin_transaction_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_commit_rest_bad_request(request_type=datastore.CommitRequest): @@ -6505,6 +6525,7 @@ def test_commit_rest_bad_request(request_type=datastore.CommitRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit(request) @@ -6540,6 +6561,7 @@ def test_commit_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit(request) # Establish that the response is the type that we expect. 
@@ -6562,10 +6584,13 @@ def test_commit_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_commit" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_commit_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_commit" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.CommitRequest.pb(datastore.CommitRequest()) transcode.return_value = { "method": "post", @@ -6576,6 +6601,7 @@ def test_commit_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.CommitResponse.to_json(datastore.CommitResponse()) req.return_value.content = return_value @@ -6586,6 +6612,7 @@ def test_commit_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.CommitResponse() + post_with_metadata.return_value = datastore.CommitResponse(), metadata client.commit( request, @@ -6597,6 +6624,7 @@ def test_commit_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_rest_bad_request(request_type=datastore.RollbackRequest): @@ -6618,6 +6646,7 @@ def test_rollback_rest_bad_request(request_type=datastore.RollbackRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback(request) @@ -6651,6 +6680,7 @@ def test_rollback_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback(request) # Establish that the response is the type that we expect. 
@@ -6672,10 +6702,13 @@ def test_rollback_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_rollback" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_rollback_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_rollback" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest()) transcode.return_value = { "method": "post", @@ -6686,6 +6719,7 @@ def test_rollback_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.RollbackResponse.to_json(datastore.RollbackResponse()) req.return_value.content = return_value @@ -6696,6 +6730,7 @@ def test_rollback_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.RollbackResponse() + post_with_metadata.return_value = datastore.RollbackResponse(), metadata client.rollback( request, @@ -6707,6 +6742,7 @@ def test_rollback_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_allocate_ids_rest_bad_request(request_type=datastore.AllocateIdsRequest): @@ -6728,6 +6764,7 @@ def test_allocate_ids_rest_bad_request(request_type=datastore.AllocateIdsRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.allocate_ids(request) @@ -6761,6 +6798,7 @@ def test_allocate_ids_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.allocate_ids(request) # Establish that the response is the type that we expect. 
@@ -6782,10 +6820,13 @@ def test_allocate_ids_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_allocate_ids" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_allocate_ids_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_allocate_ids" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest()) transcode.return_value = { "method": "post", @@ -6796,6 +6837,7 @@ def test_allocate_ids_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.AllocateIdsResponse.to_json( datastore.AllocateIdsResponse() ) @@ -6808,6 +6850,7 @@ def test_allocate_ids_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.AllocateIdsResponse() + post_with_metadata.return_value = datastore.AllocateIdsResponse(), metadata client.allocate_ids( request, @@ -6819,6 +6862,7 @@ def test_allocate_ids_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reserve_ids_rest_bad_request(request_type=datastore.ReserveIdsRequest): @@ -6840,6 +6884,7 @@ def test_reserve_ids_rest_bad_request(request_type=datastore.ReserveIdsRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reserve_ids(request) @@ -6873,6 +6918,7 @@ def test_reserve_ids_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reserve_ids(request) # Establish that the response is the type that we expect. 
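Every RPC in this file receives the same three-part treatment: a headers stub on the mocked response, a patched `post_<rpc>_with_metadata` hook, and a `(response, metadata)` priming of its return value. A condensed, hypothetical skeleton of that repeated shape (the real tests inline it per RPC rather than sharing a helper):

    import contextlib
    from unittest import mock

    @contextlib.contextmanager
    def patched_hooks(interceptor_cls, rpc):
        """Patch the pre/post/post_with_metadata hooks of one RPC at once."""
        with contextlib.ExitStack() as stack:
            yield tuple(
                stack.enter_context(mock.patch.object(interceptor_cls, name))
                for name in (f"pre_{rpc}", f"post_{rpc}", f"post_{rpc}_with_metadata")
            )

    # Usage: with patched_hooks(transports.DatastoreRestInterceptor, "lookup")
    # as (pre, post, post_with_metadata): ... then assert each was called once.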
@@ -6894,10 +6940,13 @@ def test_reserve_ids_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastoreRestInterceptor, "post_reserve_ids" ) as post, mock.patch.object( + transports.DatastoreRestInterceptor, "post_reserve_ids_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastoreRestInterceptor, "pre_reserve_ids" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest()) transcode.return_value = { "method": "post", @@ -6908,6 +6957,7 @@ def test_reserve_ids_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = datastore.ReserveIdsResponse.to_json( datastore.ReserveIdsResponse() ) @@ -6920,6 +6970,7 @@ def test_reserve_ids_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastore.ReserveIdsResponse() + post_with_metadata.return_value = datastore.ReserveIdsResponse(), metadata client.reserve_ids( request, @@ -6931,6 +6982,7 @@ def test_reserve_ids_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( @@ -6956,6 +7008,7 @@ def test_cancel_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -6986,6 +7039,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -7016,6 +7070,7 @@ def test_delete_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -7046,6 +7101,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -7076,6 +7132,7 @@ def test_get_operation_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -7106,6 +7163,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -7134,6 +7192,7 @@ def test_list_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -7164,6 +7223,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", 
"header-2": "value-2"} response = client.list_operations(request) From ba1462462d40490b0555d1fe5494c858957c16ef Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 17 Mar 2025 11:07:42 -0400 Subject: [PATCH 598/611] fix: Allow protobuf 6.x (#598) --- packages/google-cloud-datastore/setup.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index 0534aa7c8d8f..cc91b0696512 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -29,15 +29,15 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "google-cloud-core >= 1.4.0, <3.0.0dev", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-core >= 1.4.0, <3.0.0", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} From 69a66478f10b502db32f983058d6e8e2600a10f4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:51:59 -0400 Subject: [PATCH 599/611] chore: Update gapic-generator-python to 1.23.6 (#602) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.23.6 PiperOrigin-RevId: 738170370 Source-Link: https://github.com/googleapis/googleapis/commit/3f1e17aa2dec3f146a9a2a8a64c5c6d19d0b6e15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9afd8c33d4cae610b75fa4999264ea8c8c66b9d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWFmZDhjMzNkNGNhZTYxMGI3NWZhNDk5OTI2NGVhOGM4YzY2YjlkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/datastore_admin/__init__.py | 2 +- .../google/cloud/datastore_admin_v1/__init__.py | 2 +- .../google/cloud/datastore_admin_v1/services/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/__init__.py | 2 +- .../datastore_admin_v1/services/datastore_admin/async_client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/client.py | 2 +- .../cloud/datastore_admin_v1/services/datastore_admin/pagers.py | 2 +- .../services/datastore_admin/transports/__init__.py | 2 +- .../services/datastore_admin/transports/base.py | 2 +- .../services/datastore_admin/transports/grpc.py | 2 +- .../services/datastore_admin/transports/grpc_asyncio.py | 2 +- .../services/datastore_admin/transports/rest.py | 2 +- 
.../services/datastore_admin/transports/rest_base.py | 2 +- .../google/cloud/datastore_admin_v1/types/__init__.py | 2 +- .../google/cloud/datastore_admin_v1/types/datastore_admin.py | 2 +- .../google/cloud/datastore_admin_v1/types/index.py | 2 +- .../google/cloud/datastore_admin_v1/types/migration.py | 2 +- .../google/cloud/datastore_v1/__init__.py | 2 +- .../google/cloud/datastore_v1/services/__init__.py | 2 +- .../google/cloud/datastore_v1/services/datastore/__init__.py | 2 +- .../cloud/datastore_v1/services/datastore/async_client.py | 2 +- .../google/cloud/datastore_v1/services/datastore/client.py | 2 +- .../datastore_v1/services/datastore/transports/__init__.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/base.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/grpc.py | 2 +- .../datastore_v1/services/datastore/transports/grpc_asyncio.py | 2 +- .../cloud/datastore_v1/services/datastore/transports/rest.py | 2 +- .../datastore_v1/services/datastore/transports/rest_base.py | 2 +- .../google/cloud/datastore_v1/types/__init__.py | 2 +- .../google/cloud/datastore_v1/types/aggregation_result.py | 2 +- .../google/cloud/datastore_v1/types/datastore.py | 2 +- .../google/cloud/datastore_v1/types/entity.py | 2 +- .../google/cloud/datastore_v1/types/query.py | 2 +- .../google/cloud/datastore_v1/types/query_profile.py | 2 +- .../scripts/fixup_datastore_admin_v1_keywords.py | 2 +- .../scripts/fixup_datastore_v1_keywords.py | 2 +- packages/google-cloud-datastore/tests/__init__.py | 2 +- packages/google-cloud-datastore/tests/unit/__init__.py | 2 +- packages/google-cloud-datastore/tests/unit/gapic/__init__.py | 2 +- .../tests/unit/gapic/datastore_admin_v1/__init__.py | 2 +- .../tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py | 2 +- .../tests/unit/gapic/datastore_v1/__init__.py | 2 +- .../tests/unit/gapic/datastore_v1/test_datastore.py | 2 +- 43 files changed, 43 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py index 09b75aef618d..9009817cd7f1 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py index 6d57bbb98d64..4f25734e6ba4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
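Looking back at the setup.py hunk in the protobuf patch above: besides raising the protobuf cap from `<6.0.0dev` to `<7.0.0` so that protobuf 6.x installs, every pin drops the non-normalized `dev` suffix, presumably to keep newer packaging tooling from complaining about non-canonical PEP 440 specifiers. A quick illustration of what the raised cap admits, assuming the `packaging` library is available:

    from packaging.specifiers import SpecifierSet

    old_pin = SpecifierSet(">=3.20.2,<6.0.0dev,!=4.21.0")
    new_pin = SpecifierSet(">=3.20.2,<7.0.0,!=4.21.0")

    print("6.30.0" in old_pin)  # False: protobuf 6.x was excluded before
    print("6.30.0" in new_pin)  # True: the new upper bound allows it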
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index be83caf7cb18..011b1b846d9f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index 245f8fb18d5f..fbb93fac822a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 489995bc72e7..012344bf76fe 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index 4c0fa8a50090..046862c5c11b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index f3b7656e10c6..a8b80cff4263 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index bcfb2688f19b..89bb1a70117b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 498287974287..4d5c7ce05e0f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index f895032ddf9a..5ba9ac526762 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 1cdfd4b9059f..826445699939 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py index a94eece52156..b36139a6557b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py index ca082a05529e..ef6c33ec2637 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py index eb838570faea..f6251eb81d16 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index 77a7079de6e8..ef74f8968a2c 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py index ec69e94191c4..68447560ad09 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/migration.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py index a417fe1ef035..2f60c07f2a31 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py index e992abb33b29..271de0729359 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 88de0d08e474..97002490a220 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index aad9d8202ba2..3442722953ea 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 727e271c7369..719fb6996bf6 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index cb18d36925b3..6070ee14397d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 6306c21903d2..19592726b3af 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index aba046cb0cb1..e6749231d197 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index 5bd89407bb7c..274a4a6037a4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py index c8d5c675af3a..437886be6bac 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py index 0efe33ff991b..ea54d5415722 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index b35ca1f90538..2dbfb36a1819 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 281866f59efd..6f74989bac8a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 5c5bcdc403f2..7a3320ced472 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 1f8679cc2317..7f2da72b47f3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py index 8dca0f6e7498..bf43fd624d55 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query_profile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py index 2f999e1e5a8f..409cafd3d1ad 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py index 661d509b47c3..21c85f83d211 100644 --- a/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py +++ b/packages/google-cloud-datastore/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/__init__.py b/packages/google-cloud-datastore/tests/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/tests/__init__.py +++ b/packages/google-cloud-datastore/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/tests/unit/__init__.py b/packages/google-cloud-datastore/tests/unit/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/tests/unit/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index f05f7e4c405c..540daec2dc35 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py index 7f0755a88868..0332001215c4 100644 --- a/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/packages/google-cloud-datastore/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From af231302ddfc305325f2effb9bbaa36b9c461a40 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 24 Mar 2025 13:54:39 -0700 Subject: [PATCH 600/611] fix: backwards-compatibility for previous meaning format (#603) --- .../google/cloud/datastore/helpers.py | 11 +++- .../tests/unit/test_helpers.py | 55 +++++++++++++++++++ 2 files changed, 65 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py index d491360c00f5..cdce291c256b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/helpers.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/helpers.py @@ -18,6 +18,7 @@ """ import datetime +import itertools from google.protobuf import struct_pb2 from google.type import latlng_pb2 @@ -182,7 +183,15 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): return elif is_list: # for lists, set meaning on the root pb and on each sub-element - root_meaning, sub_meaning_list = meaning + if isinstance(meaning, tuple): + root_meaning, sub_meaning_list = meaning + else: + # if meaning isn't a tuple, fall back to pre-v2.20.2 meaning format + root_meaning = None + if isinstance(meaning, list): + sub_meaning_list = meaning + else: + sub_meaning_list = itertools.repeat(meaning) if root_meaning is not None: value_pb.meaning = root_meaning if sub_meaning_list: diff --git a/packages/google-cloud-datastore/tests/unit/test_helpers.py b/packages/google-cloud-datastore/tests/unit/test_helpers.py index a6f63a8078ad..710a6cb74a99 100644 --- a/packages/google-cloud-datastore/tests/unit/test_helpers.py +++ b/packages/google-cloud-datastore/tests/unit/test_helpers.py @@ -1318,6 +1318,61 @@ def test__set_pb_meaning_w_value_unset(orig_meaning): assert value_pb.meaning == orig_meaning +def test__set_pb_meaning_w_list_and_single_value(): + """ + v2.20.2 uses a tuple to represent list meanings (https://github.com/googleapis/python-datastore/pull/575) + + This check ensures _set_pb_meaning_from_entity is backwards + compatible with the old meaning style, still used by python-ndb + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _set_pb_meaning_from_entity + from google.cloud.datastore.entity import Entity + + orig_root_meaning = 1 + updated_meaning = 22 + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = orig_root_meaning + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + entity = Entity(key="key") + entity._meanings = {"value": (updated_meaning, None)} + _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=True) + assert value_pb.meaning == orig_root_meaning + assert sub_value_pb1.meaning == updated_meaning + assert sub_value_pb2.meaning == 
updated_meaning + + +def test__set_pb_meaning_w_list_and_list(): + """ + v2.20.2 uses a tuple to represent list meanings (https://github.com/googleapis/python-datastore/pull/575) + + This check ensures _set_pb_meaning_from_entity is backwards + compatible with the old meaning style, still used by python-ndb + """ + from google.cloud.datastore_v1.types import entity as entity_pb2 + from google.cloud.datastore.helpers import _set_pb_meaning_from_entity + from google.cloud.datastore.entity import Entity + + orig_root_meaning = 1 + updated_meaning_1 = 12 + updated_meaning_2 = 4 + orig_pb = entity_pb2.Entity() + value_pb = orig_pb._pb.properties.get_or_create("value") + value_pb.meaning = orig_root_meaning + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + entity = Entity(key="key") + entity._meanings = {"value": ([updated_meaning_1, updated_meaning_2], None)} + _set_pb_meaning_from_entity(entity, "value", None, value_pb, is_list=True) + assert value_pb.meaning == orig_root_meaning + assert sub_value_pb1.meaning == updated_meaning_1 + assert sub_value_pb2.meaning == updated_meaning_2 + + def test__array_w_meaning_end_to_end(): """ Test proto->entity->proto with an array with a meaning field From 0848b1176afd7dfeff6b12f28b9c76ab717aa1b4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 12:11:23 -0400 Subject: [PATCH 601/611] chore(python): fix incorrect import statement in README (#604) Source-Link: https://github.com/googleapis/synthtool/commit/87677404f85cee860588ebe2c352d0609f683d5d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c Co-authored-by: Owl Bot --- packages/google-cloud-datastore/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-datastore/README.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 3f7634f25f8e..c4e82889dc81 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
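The compatibility shim in PATCH 600 above dispatches on the Python type of the stored meaning: the tuple form introduced in v2.20.2 carries (root_meaning, sub_meaning_list), while older callers such as python-ndb may still hand over a bare list (one meaning per array element) or a single scalar (applied to every element). A minimal standalone sketch of that dispatch; the helper name is illustrative, not part of the library:

    import itertools

    def _normalize_meaning(meaning):
        # Mirrors the branching added to _set_pb_meaning_from_entity in PATCH 600.
        if isinstance(meaning, tuple):
            # v2.20.2+ format: (root_meaning, sub_meaning_list)
            root_meaning, sub_meaning_list = meaning
        else:
            # pre-v2.20.2 formats never carried a root meaning
            root_meaning = None
            if isinstance(meaning, list):
                # one meaning per array element
                sub_meaning_list = meaning
            else:
                # a single scalar applies to every array element
                sub_meaning_list = itertools.repeat(meaning)
        return root_meaning, sub_meaning_list

    # All three historical shapes normalize to the same pair:
    # _normalize_meaning((1, [2, 3])) -> (1, [2, 3])
    # _normalize_meaning([12, 4])     -> (None, [12, 4])
    # _normalize_meaning(22)          -> (None, repeat(22))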
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c +# created: 2025-03-31T16:51:40.130756953Z diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index dc21a4e83fc0..f458af4acdc6 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -166,7 +166,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google") base_logger.addHandler(logging.StreamHandler()) @@ -178,7 +178,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google.cloud.library_v1") base_logger.addHandler(logging.StreamHandler()) From 4ccd9ebac293f27145bfee429b3cb3a6b785f4bd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 15:00:12 -0400 Subject: [PATCH 602/611] chore(python): remove CONTRIBUTING.rst from templates (#605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): remove CONTRIBUTING.rst from templates Source-Link: https://github.com/googleapis/synthtool/commit/c96fb118e03c2b50d50fe17c1d0845479a0cfa9a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove replacement in owlbot.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-datastore/owlbot.py | 42 ------------------- 2 files changed, 2 insertions(+), 44 deletions(-) diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index c4e82889dc81..8bc6405eca8b 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c -# created: 2025-03-31T16:51:40.130756953Z + digest: sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab +# created: 2025-04-10T17:48:54.829145676Z diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 9fcf0e1583e9..cbb0f8aecc53 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -211,48 +211,6 @@ def docfx(session): r"Dict[str, str]", ) -# Add documentation about creating indexes and populating data for system -# tests. 
-assert 1 == s.replace( - "CONTRIBUTING.rst", - r""" -\*\*\*\*\*\*\*\*\*\*\*\*\* -Test Coverage -\*\*\*\*\*\*\*\*\*\*\*\*\* -""", - """ -- You'll need to create composite - `indexes `__ - with the ``gcloud`` command line - `tool `__:: - - # Install the app (App Engine Command Line Interface) component. - $ gcloud components install app-engine-python - - # Authenticate the gcloud tool with your account. - $ GOOGLE_APPLICATION_CREDENTIALS="path/to/app_credentials.json" - $ gcloud auth activate-service-account \ - > --key-file=${GOOGLE_APPLICATION_CREDENTIALS} - - # Create the indexes - $ gcloud datastore indexes create tests/system/index.yaml - -- You'll also need stored data in your dataset. To populate this data, run:: - - $ python tests/system/utils/populate_datastore.py - -- If you make a mistake during development (i.e. a failing test that - prevents clean-up) you can clear all system test data from your - datastore instance via:: - - $ python tests/system/utils/clear_datastore.py - -************* -Test Coverage -************* -""", -) - # add type checker nox session s.replace( "noxfile.py", From f47e185c11855476d84c88681c71f8304fd878cf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 17:59:08 -0700 Subject: [PATCH 603/611] chore(main): release 2.21.0 (#595) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-datastore/CHANGELOG.md | 15 +++++++++++++++ .../google/cloud/datastore/gapic_version.py | 2 +- .../google/cloud/datastore/version.py | 2 +- .../google/cloud/datastore_admin/gapic_version.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 2 +- .../google/cloud/datastore_v1/gapic_version.py | 2 +- 7 files changed, 21 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-datastore/.release-please-manifest.json b/packages/google-cloud-datastore/.release-please-manifest.json index eeb4bcda33c6..5be20145ac45 100644 --- a/packages/google-cloud-datastore/.release-please-manifest.json +++ b/packages/google-cloud-datastore/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.20.2" + ".": "2.21.0" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/CHANGELOG.md b/packages/google-cloud-datastore/CHANGELOG.md index 549b44f46c6a..0ed89fd52d49 100644 --- a/packages/google-cloud-datastore/CHANGELOG.md +++ b/packages/google-cloud-datastore/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.21.0](https://github.com/googleapis/python-datastore/compare/v2.20.2...v2.21.0) (2025-04-10) + + +### Features + +* Add REST Interceptors which support reading metadata ([7be9c4c](https://github.com/googleapis/python-datastore/commit/7be9c4c594af2c2414e394b8bfe62574b58ef337)) +* Add support for opt-in debug logging ([7be9c4c](https://github.com/googleapis/python-datastore/commit/7be9c4c594af2c2414e394b8bfe62574b58ef337)) + + +### Bug Fixes + +* Allow protobuf 6.x ([#598](https://github.com/googleapis/python-datastore/issues/598)) ([7c1171b](https://github.com/googleapis/python-datastore/commit/7c1171bf657f7cf4d1404e19611f6c874a8998ca)) +* Backwards-compatibility for previous meaning format ([#603](https://github.com/googleapis/python-datastore/issues/603)) ([ed92e8e](https://github.com/googleapis/python-datastore/commit/ed92e8e54a9e0f44302efee89a30a322d0a73636)) +* Fix typing issue with gRPC metadata when key ends in -bin 
([7be9c4c](https://github.com/googleapis/python-datastore/commit/7be9c4c594af2c2414e394b8bfe62574b58ef337)) + ## [2.20.2](https://github.com/googleapis/python-datastore/compare/v2.20.1...v2.20.2) (2024-12-12) diff --git a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py index 6975b43d3e80..81570778317a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore/version.py b/packages/google-cloud-datastore/google/cloud/datastore/version.py index e37230a636b1..563b0e160e7a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore/version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.2" +__version__ = "2.21.0" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py index 4c1787c53865..e546bae0531e 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index efd187993dda..6e29ec5f3feb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index efd187993dda..6e29ec5f3feb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
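Among the 2.21.0 features recorded in the changelog above is opt-in debug logging. The README example corrected in PATCH 601 uses a placeholder library_v1 module; by analogy (the exact logger name here is an assumption, not something this changelog states), wiring it up for Datastore would look like:

    import logging

    # Logger name assumed by analogy with the README's
    # "google.cloud.library_v1" example above.
    base_logger = logging.getLogger("google.cloud.datastore_v1")
    base_logger.setLevel(logging.DEBUG)
    base_logger.addHandler(logging.StreamHandler())

Attaching the handler to a scoped logger rather than the root logger keeps the debug output limited to this one client library.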
-__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} From c4280133298604c478334bfce37c042e066436a5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 16 May 2025 19:30:24 -0400 Subject: [PATCH 604/611] fix: remove setup.cfg configuration for creating universal wheels (#601) Co-authored-by: Daniel Sanche --- packages/google-cloud-datastore/setup.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 packages/google-cloud-datastore/setup.cfg diff --git a/packages/google-cloud-datastore/setup.cfg b/packages/google-cloud-datastore/setup.cfg deleted file mode 100644 index 052350089505..000000000000 --- a/packages/google-cloud-datastore/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 From a59daac088c26876cbd4ea1aff3e47d9f2cef447 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 06:37:06 -0400 Subject: [PATCH 605/611] chore: Update gapic-generator-python to 1.25.0 (#608) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.24.0 PiperOrigin-RevId: 747419463 Source-Link: https://github.com/googleapis/googleapis/commit/340579bf7f97ba56cda0c70176dc5b03a8357667 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e8997ec5136ecb6ed9a969a4c2f13b3ab6a17c12 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTg5OTdlYzUxMzZlY2I2ZWQ5YTk2OWE0YzJmMTNiM2FiNmExN2MxMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to 1.24.1 PiperOrigin-RevId: 748739072 Source-Link: https://github.com/googleapis/googleapis/commit/b947e523934dbac5d97613d8aa08e04fc38c5fb6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8c5821aa65a921d59b3f7653d6f37c9c67410c2f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGM1ODIxYWE2NWE5MjFkNTliM2Y3NjUzZDZmMzdjOWM2NzQxMGMyZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to 1.25.0 PiperOrigin-RevId: 755914147 Source-Link: https://github.com/googleapis/googleapis/commit/97a83d76a09a7f6dcab43675c87bdfeb5bcf1cb5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a9977efedc836ccece1f01d529b0315e1efe52ad Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTk5NzdlZmVkYzgzNmNjZWNlMWYwMWQ1MjliMDMxNWUxZWZlNTJhZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/datastore_admin/async_client.py | 4 ++++ .../datastore_admin_v1/services/datastore_admin/client.py | 3 +++ 
.../services/datastore_admin/transports/base.py | 4 ++++ .../services/datastore_admin/transports/grpc.py | 3 +-- .../services/datastore_admin/transports/rest.py | 4 ++++ .../cloud/datastore_v1/services/datastore/async_client.py | 4 ++++ .../google/cloud/datastore_v1/services/datastore/client.py | 3 +++ .../cloud/datastore_v1/services/datastore/transports/base.py | 4 ++++ .../cloud/datastore_v1/services/datastore/transports/grpc.py | 3 +-- .../cloud/datastore_v1/services/datastore/transports/rest.py | 4 ++++ 10 files changed, 32 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index fbb93fac822a..4fe3c7991342 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1378,5 +1379,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("DatastoreAdminAsyncClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index 012344bf76fe..ce628e4b728d 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -45,6 +45,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1779,5 +1780,7 @@ def cancel_operation( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("DatastoreAdminClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 89bb1a70117b..2dce9b112210 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.datastore_admin_v1.types import datastore_admin from google.cloud.datastore_admin_v1.types import index @@ -35,6 +36,9 @@ gapic_version=package_version.__version__ ) +if 
hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class DatastoreAdminTransport(abc.ABC): """Abstract transport class for DatastoreAdmin.""" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 4d5c7ce05e0f..41b47b3f09d7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -73,12 +73,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.datastore.admin.v1.DatastoreAdmin", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index 826445699939..a87e0d04f427 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -61,6 +62,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class DatastoreAdminRestInterceptor: """Interceptor for DatastoreAdmin. 
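The hasattr guard that PATCH 605 repeats across every generated client and transport is feature detection: older google-api-core releases predate the protobuf_runtime_version field on ClientInfo, so the field is only populated when the installed version defines it. The same pattern in isolation, as a sketch assuming google-api-core and protobuf are importable:

    import google.protobuf
    from google.api_core.gapic_v1.client_info import ClientInfo

    client_info = ClientInfo(gapic_version="2.21.0")
    # Feature-detect: only report the protobuf runtime when the installed
    # google-api-core ClientInfo knows about this field.
    if hasattr(client_info, "protobuf_runtime_version"):
        client_info.protobuf_runtime_version = google.protobuf.__version__

The same patch also wraps client_call_details.method in str() before logging, which keeps the rpcName entry in the structured log record a plain string even when gRPC supplies a non-string method value.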
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index 97002490a220..bf2dad03754f 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1512,5 +1513,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("DatastoreAsyncClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index 3442722953ea..f6c1f998f513 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -45,6 +45,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1914,5 +1915,7 @@ def cancel_operation( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("DatastoreClient",) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index 6070ee14397d..dbf3fb22e21a 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.datastore_v1.types import datastore from google.longrunning import operations_pb2 # type: ignore @@ -33,6 +34,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class DatastoreTransport(abc.ABC): """Abstract transport class for Datastore.""" diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 19592726b3af..7c3f8bf3915b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -71,12 +71,11 @@ def 
intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.datastore.v1.Datastore", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index 274a4a6037a4..e01295ade8e9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format @@ -59,6 +60,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class DatastoreRestInterceptor: """Interceptor for Datastore. From 2ada50066be7c092873d11012401d2bb2e762934 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 14 Jul 2025 12:02:55 -0400 Subject: [PATCH 606/611] chore: remove unused files (#597) --- .../.kokoro/docker/docs/Dockerfile | 89 --- .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 -- .../.kokoro/docker/docs/requirements.in | 2 - .../.kokoro/docker/docs/requirements.txt | 297 ---------- .../.kokoro/docs/common.cfg | 66 --- .../.kokoro/docs/docs-presubmit.cfg | 28 - .../.kokoro/docs/docs.cfg | 1 - .../.kokoro/publish-docs.sh | 58 -- .../google-cloud-datastore/.kokoro/release.sh | 29 - .../.kokoro/release/common.cfg | 49 -- .../.kokoro/release/release.cfg | 1 - .../.kokoro/requirements.in | 11 - .../.kokoro/requirements.txt | 537 ------------------ 13 files changed, 1213 deletions(-) delete mode 100644 packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile delete mode 100755 packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh delete mode 100644 packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in delete mode 100644 packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt delete mode 100644 packages/google-cloud-datastore/.kokoro/docs/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/docs/docs.cfg delete mode 100755 packages/google-cloud-datastore/.kokoro/publish-docs.sh delete mode 100755 packages/google-cloud-datastore/.kokoro/release.sh delete mode 100644 packages/google-cloud-datastore/.kokoro/release/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/release/release.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/requirements.in delete mode 100644 packages/google-cloud-datastore/.kokoro/requirements.txt diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296bd8..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# 
Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh deleted file mode 100755 index d653dd868e4b..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/fetch_gpg_keys.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A script to fetch gpg keys with retry. -# Avoid jinja parsing the file. -# - -function retry { - if [[ "${#}" -le 1 ]]; then - echo "Usage: ${0} retry_count commands.." - exit 1 - fi - local retries=${1} - local command="${@:2}" - until [[ "${retries}" -le 0 ]]; do - $command && return 0 - if [[ $? 
-ne 0 ]]; then - echo "command failed, retrying" - ((retries--)) - fi - done - return 1 -} - -# 3.6.9, 3.7.5 (Ned Deily) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D - -# 3.8.0 (Łukasz Langa) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - E3FF2839C048B25C084DEBE9B26995E310250568 - -# diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd07037ae..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25b707..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - 
--hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - 
--hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in 
-google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - 
--hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - 
--hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/packages/google-cloud-datastore/.kokoro/docs/common.cfg b/packages/google-cloud-datastore/.kokoro/docs/common.cfg deleted file mode 100644 index 33f2bdc708e9..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docs/common.cfg +++ /dev/null @@ -1,66 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} diff --git a/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 049a9863541a..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/packages/google-cloud-datastore/.kokoro/docs/docs.cfg b/packages/google-cloud-datastore/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-datastore/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/publish-docs.sh b/packages/google-cloud-datastore/.kokoro/publish-docs.sh deleted file mode 100755 index 4ed4aaf1346f..000000000000 --- a/packages/google-cloud-datastore/.kokoro/publish-docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/google-cloud-datastore/.kokoro/release.sh b/packages/google-cloud-datastore/.kokoro/release.sh deleted file mode 100755 index d3805e022934..000000000000 --- a/packages/google-cloud-datastore/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-datastore/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") -cd github/python-datastore -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-datastore/.kokoro/release/common.cfg b/packages/google-cloud-datastore/.kokoro/release/common.cfg deleted file mode 100644 index 09d805a9c697..000000000000 --- a/packages/google-cloud-datastore/.kokoro/release/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-2" - } - } -} - -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. -action { - define_artifacts { - regex: "github/python-datastore/**/*.tar.gz" - strip_prefix: "github/python-datastore" - } -} diff --git a/packages/google-cloud-datastore/.kokoro/release/release.cfg b/packages/google-cloud-datastore/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-datastore/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/requirements.in b/packages/google-cloud-datastore/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce0d0a..000000000000 --- a/packages/google-cloud-datastore/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/packages/google-cloud-datastore/.kokoro/requirements.txt b/packages/google-cloud-datastore/.kokoro/requirements.txt deleted file mode 100644 index 9622baf0ba38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/requirements.txt +++ /dev/null @@ -1,537 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f - # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - 
--hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 - # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 - # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - 
--hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==42.0.8 \ - --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ - --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ - --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ - --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ - --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ - --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ - --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ - --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ - --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ - --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ - --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ - --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ - --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ - --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ - 
--hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ - --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ - --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ - --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ - --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ - --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ - --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ - --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ - --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ - --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ - --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ - --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ - --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ - --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ - --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ - --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ - --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ - --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 - # via virtualenv -docutils==0.21.2 \ - --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.0.1 \ - --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ - --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 - # via -r requirements.in -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.31.0 \ - --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ - --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.17.0 \ - --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ - --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 - # 
via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - 
--hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.1 \ - --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ - --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 - # via google-cloud-storage -googleapis-common-protos==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 - # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 - # via requests -importlib-metadata==8.0.0 \ - --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ - --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==5.3.0 \ - --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ - --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 - # via keyring -jaraco-functools==4.0.1 \ - 
--hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ - --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - 
--hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - 
--hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 - # via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - 
--hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 - # via - # requests - # twine -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 - # via nox -wheel==0.43.0 \ - 
--hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 - # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 - # via -r requirements.in From 2e8194f580fde4aba7fda07b0da7dda8bde88daa Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Tue, 11 Nov 2025 07:17:45 -0500 Subject: [PATCH 607/611] chore(python): Add support for Python 3.14 (#644) This PR adds support for Python 3.14 to the library. Key changes include: - Updates the `.github/workflows` files to account for both the 3.14 and 3.8 Python runtimes. - Adds Python 3.14 to the test matrix in `.github/workflows/unittest.yml`, etc. - Updates `.github/sync-repo-settings.yaml` to include the 3.14 unit tests in the required checks. - Updates the `.kokoro/presubmit` files, covering the system test configs and `presubmit.cfg`. - Adds `testing/constraints-3.14.txt`. - Updates `CONTRIBUTING.rst` to list Python 3.14 as a supported version. - Updates `mypy.ini` to suppress several known type-hinting errors in two files. - Updates `noxfile.py` to include 3.14 sessions. - Updates `owlbot.py` to include Python 3.14. - Updates `setup.py` to include the Python 3.14 classifier and to add conditional dependencies for `grpcio`. --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 3 +- .../.github/sync-repo-settings.yaml | 1 + .../.github/workflows/lint.yml | 2 +- .../.github/workflows/mypy.yml | 2 +- .../.github/workflows/unittest.yml | 4 +- .../.kokoro/presubmit/presubmit.cfg | 5 + .../{system-3.8.cfg => system-3.12.cfg} | 2 +- .../.kokoro/presubmit/system-3.14.cfg | 7 + .../.kokoro/samples/python3.14/common.cfg | 40 +++++ .../.kokoro/samples/python3.14/continuous.cfg | 6 + .../samples/python3.14/periodic-head.cfg | 11 ++ .../.kokoro/samples/python3.14/periodic.cfg | 6 + .../.kokoro/samples/python3.14/presubmit.cfg | 6 + .../google-cloud-datastore/CONTRIBUTING.rst | 4 +- packages/google-cloud-datastore/README.rst | 2 +- packages/google-cloud-datastore/mypy.ini | 12 +- packages/google-cloud-datastore/noxfile.py | 23 ++- packages/google-cloud-datastore/owlbot.py | 154 +----------------- .../samples/snippets/noxfile.py | 2 +- .../snippets/schedule-export/noxfile.py | 2 +- packages/google-cloud-datastore/setup.py | 4 + .../testing/constraints-3.14.txt | 1 + 22 files changed, 131 insertions(+), 168 deletions(-) rename packages/google-cloud-datastore/.kokoro/presubmit/{system-3.8.cfg => system-3.12.cfg} (82%) create mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg create mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg create mode 100644
packages/google-cloud-datastore/testing/constraints-3.14.txt diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml index 8bc6405eca8b..c58733d0c826 100644 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:710b70faff81151657d89db6e028c23a1051787598c8276bdd8eef25c92da8ab -# created: 2025-04-10T17:48:54.829145676Z + digest: sha256:fbbc8db67afd8b7d71bf694c5081a32da0c528eba166fbcffb3b6e56ddf907d5 diff --git a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml index 319f6e4bc1f0..137807f26de7 100644 --- a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml @@ -30,6 +30,7 @@ branchProtectionRules: - 'unit (3.11)' - 'unit (3.12)' - 'unit (3.13)' + - 'unit (3.14)' - 'cover' - 'mypy' # List of explicit permissions to add (additive only) diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml index 4866193af2a9..3ed755f0005c 100644 --- a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ b/packages/google-cloud-datastore/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.14" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml index 3915cddd3d1c..8363e7218202 100644 --- a/packages/google-cloud-datastore/.github/workflows/mypy.yml +++ b/packages/google-cloud-datastore/.github/workflows/mypy.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.14" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml index c66b757ced2b..cc6fe2b2fdd5 100644 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ b/packages/google-cloud-datastore/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] steps: - name: Checkout uses: actions/checkout@v4 @@ -45,7 +45,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.14" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg index b158096f0ae2..54593f363cf2 100644 --- a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg +++ b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg @@ -5,3 +5,8 @@ env_vars: { key: "RUN_SYSTEM_TESTS" value: "false" } + +env_vars: { + key: "NOX_SESSION" + value: "blacken doctests format" +} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg 
b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg similarity index 82% rename from packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg rename to packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg index f4bcee3db0f0..78cdc5e85109 100644 --- a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.8.cfg +++ b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg @@ -3,5 +3,5 @@ # Only run this nox session. env_vars: { key: "NOX_SESSION" - value: "system-3.8" + value: "system-3.12" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg new file mode 100644 index 000000000000..86e7c5d7762c --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "system-3.14" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg new file mode 100644 index 000000000000..dcfec354330b --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.14" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-314" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-datastore/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg new file mode 100644 index 000000000000..714045a75ed7 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-datastore/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-datastore/CONTRIBUTING.rst b/packages/google-cloud-datastore/CONTRIBUTING.rst index c59f8503d5f5..0c504970f7fb 100644 --- a/packages/google-cloud-datastore/CONTRIBUTING.rst +++ b/packages/google-cloud-datastore/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -253,6 +253,7 @@ We support: - `Python 3.11`_ - `Python 3.12`_ - `Python 3.13`_ +- `Python 3.14`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ @@ -261,6 +262,7 @@ We support: .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ .. _Python 3.13: https://docs.python.org/3.13/ +.. _Python 3.14: https://docs.python.org/3.14/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/packages/google-cloud-datastore/README.rst b/packages/google-cloud-datastore/README.rst index f458af4acdc6..1ec7e45c1c6e 100644 --- a/packages/google-cloud-datastore/README.rst +++ b/packages/google-cloud-datastore/README.rst @@ -65,7 +65,7 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. -Python >= 3.7 +Python >= 3.7, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches diff --git a/packages/google-cloud-datastore/mypy.ini b/packages/google-cloud-datastore/mypy.ini index a94e6b3f0f73..5b593704f6e8 100644 --- a/packages/google-cloud-datastore/mypy.ini +++ b/packages/google-cloud-datastore/mypy.ini @@ -1,7 +1,17 @@ [mypy] -python_version = 3.8 +python_version = 3.10 namespace_packages = True ignore_missing_imports = True [mypy-google.cloud.datastore._app_engine_key_pb2] ignore_errors = True + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): +# Remove once this generator bug is fixed +[mypy-google.cloud.datastore_v1.services.datastore.async_client] +ignore_errors = True + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): +# Remove once this generator bug is fixed +[mypy-google.cloud.datastore_v1.services.datastore.client] +ignore_errors = True diff --git a/packages/google-cloud-datastore/noxfile.py b/packages/google-cloud-datastore/noxfile.py index 7fcab22046d6..d8151522b7c1 100644 --- a/packages/google-cloud-datastore/noxfile.py +++ b/packages/google-cloud-datastore/noxfile.py @@ -32,7 +32,7 @@ ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_PYTHON_VERSION = "3.14" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.7", @@ -42,6 +42,7 @@ "3.11", "3.12", "3.13", + "3.14", ] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", @@ -58,7 +59,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", @@ -157,7 +158,7 @@ def mypy(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") + session.install("docutils", "pygments", "setuptools") session.run("python", "setup.py", "check", "--restructuredtext", "--strict") @@ -197,7 +198,12 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -415,7 +421,7 @@ def docfx(session): ) -@nox.session(python="3.13") +@nox.session(python="3.14") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -423,7 +429,12 @@ def docfx(session): def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index cbb0f8aecc53..4638b928ef0a 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -73,18 +73,6 @@ def get_staging_dirs( s.move(library / "scripts") for library in get_staging_dirs(datastore_admin_default_version, "datastore_admin"): - s.replace( - library / "google/**/datastore_admin_client.py", - "google-cloud-datastore-admin", - "google-cloud-datstore", - ) - - # Remove spurious markup - s.replace( - library / "google/**/datastore_admin/client.py", - r"\s+---------------------------------(-)+", - "", - ) s.move(library / f"google/cloud/datastore_admin", excludes=["**/gapic_version.py"]) s.move(library / f"google/cloud/datastore_admin_{library.name}", excludes=["**/gapic_version.py"]) @@ -103,149 +91,15 @@ def get_staging_dirs( unit_test_external_dependencies=["six"], system_test_external_dependencies=["six"], cov_level=100, + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + default_python_version="3.14", + system_test_python_versions=["3.14"], ) s.move( templated_files, - excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS", ".github/release-please.yml"], + excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS", ".github/release-please.yml", ".kokoro/presubmit/system-3.8.cfg", ".kokoro/presubmit/presubmit.cfg"], ) python.py_samples(skip_readmes=True) -python.configure_previous_major_version_branches() - -# Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) -assert 1 == s.replace( - "noxfile.py", - r"""\ -@nox.session\(python=SYSTEM_TEST_PYTHON_VERSIONS\) -def system\(session\): -""", - """\ -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -@nox.parametrize("disable_grpc", [False, True]) -def system(session, disable_grpc): -""", -) - -assert 1 == s.replace( - "noxfile.py", - """\ - # Run py.test against the system tests. -""", - """\ - env = {} - if disable_grpc: - env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True" - - # Run py.test against the system tests. 
-""", -) - -assert 1 == s.replace( - "noxfile.py", - """system_test_path, - \*session.posargs, - \)""", - """system_test_path, - env=env, - *session.posargs, - )""", -) - -assert 1 == s.replace( - "noxfile.py", - """system_test_folder_path, - \*session.posargs, - \)""", - """system_test_folder_path, - env=env, - *session.posargs, - )""", -) - -# Add nox session to exercise doctests -assert 1 == s.replace( - "noxfile.py", - r"""\ - "blacken", - "docs", -""", - """\ - "blacken", - "docs", - "doctests", -""", -) - -assert 1 == s.replace( - "noxfile.py", - r"""\ -@nox.session\(python="3.10"\) -def docfx\(session\): -""", - """\ -@nox.session(python="3.9") -def doctests(session): - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "sphinx", "google-cloud-testutils") - session.install("-e", ".") - - # Run py.test against the system tests. - session.run("py.test", "tests/doctests.py") - - -@nox.session(python="3.10") -def docfx(session): -""", -) - -# Work around: https://github.com/googleapis/gapic-generator-python/issues/689 -s.replace( - [ - "google/**/datastore_admin/async_client.py", - "google/**/datastore_admin/client.py", - "google/**/types/datastore_admin.py", - ], - r"Sequence\[.*\.LabelsEntry\]", - r"Dict[str, str]", -) - -# add type checker nox session -s.replace( - "noxfile.py", - """nox.options.sessions = \[ - "unit", - "system",""", - """nox.options.sessions = [ - "unit", - "system", - "mypy",""", -) - - -s.replace( - "noxfile.py", - """\ -@nox.session\(python=DEFAULT_PYTHON_VERSION\) -def lint_setup_py\(session\): -""", - '''\ -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy(session): - """Verify type hints are mypy compatible.""" - session.install("-e", ".") - # Exclude types-protobuf==4.24.0.20240106 - # See https://github.com/python/typeshed/issues/11254 - session.install( - "mypy", "types-setuptools", "types-mock", "types-protobuf!=4.24.0.20240106", "types-requests" - ) - session.run("mypy", "-p", "google.cloud.datastore") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): -''', -) - s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-datastore/samples/snippets/noxfile.py b/packages/google-cloud-datastore/samples/snippets/noxfile.py index a169b5b5b464..69bcaf56de6f 100644 --- a/packages/google-cloud-datastore/samples/snippets/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py index a169b5b5b464..69bcaf56de6f 100644 --- a/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py +++ b/packages/google-cloud-datastore/samples/snippets/schedule-export/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. 
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-datastore/setup.py b/packages/google-cloud-datastore/setup.py index cc91b0696512..3d0917ee7ade 100644 --- a/packages/google-cloud-datastore/setup.py +++ b/packages/google-cloud-datastore/setup.py @@ -38,6 +38,8 @@ "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", "protobuf>=3.20.2,<7.0.0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpcio >= 1.38.0, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", ] extras = {"libcst": "libcst >= 0.2.5"} @@ -84,6 +86,8 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/packages/google-cloud-datastore/testing/constraints-3.14.txt b/packages/google-cloud-datastore/testing/constraints-3.14.txt new file mode 100644 index 000000000000..35395af59f20 --- /dev/null +++ b/packages/google-cloud-datastore/testing/constraints-3.14.txt @@ -0,0 +1 @@ +grpcio >= 1.75.1 \ No newline at end of file From cf9622471fbee41f0e997fe4ce90a515d3899100 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Nov 2025 04:33:19 -0800 Subject: [PATCH 608/611] chore: Update gapic-generator-python to 1.26.2 (#642) - [ ] Regenerate this pull request now. 
fix: Deprecate credentials_file argument chore: Update gapic-generator-python to 1.28.0 PiperOrigin-RevId: 816753840 Source-Link: https://github.com/googleapis/googleapis/commit/d06cf27a47074d1de3fde6f0ca48680a96229306 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a524e7310882bbb99bfe1399b18bed328979211c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTUyNGU3MzEwODgyYmJiOTliZmUxMzk5YjE4YmVkMzI4OTc5MjExYyJ9 BEGIN_NESTED_COMMIT chore: Update gapic-generator-python to 1.26.2 PiperOrigin-RevId: 802200836 Source-Link: https://github.com/googleapis/googleapis/commit/d300b151a973ce0425ae4ad07b3de957ca31bec6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a1ff0ae72ddcb68a259215d8c77661e2cdbb9b02 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTFmZjBhZTcyZGRjYjY4YTI1OTIxNWQ4Yzc3NjYxZTJjZGJiOWIwMiJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../datastore_admin/transports/base.py | 5 +- .../datastore_admin/transports/grpc.py | 8 +- .../transports/grpc_asyncio.py | 8 +- .../datastore_admin/transports/rest.py | 5 +- .../cloud/datastore_admin_v1/types/index.py | 2 +- .../services/datastore/async_client.py | 8 +- .../datastore_v1/services/datastore/client.py | 8 +- .../services/datastore/transports/base.py | 5 +- .../services/datastore/transports/grpc.py | 8 +- .../datastore/transports/grpc_asyncio.py | 8 +- .../services/datastore/transports/rest.py | 5 +- .../datastore_v1/types/aggregation_result.py | 2 +- .../cloud/datastore_v1/types/datastore.py | 8 +- .../google/cloud/datastore_v1/types/entity.py | 12 +- .../google/cloud/datastore_v1/types/query.py | 105 +++++++++--------- 15 files changed, 104 insertions(+), 93 deletions(-) diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 2dce9b112210..6467d4f171e3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -73,9 +73,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
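[Editorial aside, not part of the patch] Since these docstrings now flag ``credentials_file`` as deprecated, one minimal migration sketch is to load the credentials explicitly and pass the object instead; per the docstring the two arguments are mutually exclusive, and "service-account.json" below is a placeholder path:

    import google.auth
    from google.cloud import datastore

    # Load credentials yourself instead of passing credentials_file.
    credentials, project_id = google.auth.load_credentials_from_file(
        "service-account.json"
    )
    # Pass the loaded credentials object to the client.
    client = datastore.Client(project=project_id, credentials=credentials)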
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 41b47b3f09d7..81414590c735 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -207,9 +207,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -343,9 +344,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 5ba9ac526762..8385c51199d3 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -204,8 +204,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -256,9 +257,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. 
- credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py index a87e0d04f427..1c3397b826a9 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py @@ -616,9 +616,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py index ef74f8968a2c..be2168b0c552 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/types/index.py @@ -48,7 +48,7 @@ class Index(proto.Message): Requires: - - A maximum of 100 properties. + - A maximum of 100 properties. state (google.cloud.datastore_admin_v1.types.Index.State): Output only. The state of the index. """ diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py index bf2dad03754f..98226fa9cf83 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/async_client.py @@ -806,10 +806,10 @@ async def sample_commit(): sequences of mutations affecting a single entity are not permitted in a single ``Commit`` request: - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single entity. 
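[Editorial aside, not part of the patch] The ordering rules quoted above constrain a single ``Commit`` request; a common way around them is to split conflicting writes across commits. A minimal sketch with the handwritten client, where each ``put`` outside a batch or transaction issues its own commit (``Task`` and ``1234`` are hypothetical):

    from google.cloud import datastore

    client = datastore.Client()      # assumes ambient credentials/project
    key = client.key("Task", 1234)   # hypothetical kind and id

    task = datastore.Entity(key=key)
    task["done"] = False

    # Two mutations affecting the same entity may not share one Commit
    # request, so the conflicting writes are issued as separate commits.
    client.put(task)                 # first commit (upsert)
    task["done"] = True
    client.put(task)                 # second commit (update)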
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py index f6c1f998f513..a491e16f06b2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/client.py @@ -1201,10 +1201,10 @@ def sample_commit(): sequences of mutations affecting a single entity are not permitted in a single ``Commit`` request: - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single entity. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py index dbf3fb22e21a..631db6d1098b 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -71,9 +71,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 7c3f8bf3915b..d1b4417f7e80 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -158,9 +158,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -293,9 +294,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index e6749231d197..3fca948dbaf7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -155,8 +155,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -207,9 +208,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py index e01295ade8e9..a74b50884727 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/services/datastore/transports/rest.py @@ -673,9 +673,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. 
scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py index 2dbfb36a1819..35d216a96e54 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/aggregation_result.py @@ -69,7 +69,7 @@ class AggregationResultBatch(proto.Message): The aggregation results for this batch. more_results (google.cloud.datastore_v1.types.QueryResultBatch.MoreResultsType): The state of the query after the current batch. Only - COUNT(*) aggregations are supported in the initial launch. + COUNT(\*) aggregations are supported in the initial launch. Therefore, expected result type is limited to ``NO_MORE_RESULTS``. read_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py index 6f74989bac8a..5f6e053f9fbb 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/datastore.py @@ -564,10 +564,10 @@ class CommitRequest(proto.Message): mutations affecting a single entity are not permitted in a single ``Commit`` request: - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single entity. diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py index 7a3320ced472..59e38c6305c7 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/entity.py @@ -46,12 +46,12 @@ class PartitionId(proto.Message): Partition dimensions: - - May be ``""``. - - Must be valid UTF-8 bytes. - - Must have values that match regex ``[A-Za-z\d\.\-_]{1,100}`` If - the value of any dimension matches regex ``__.*__``, the - partition is reserved/read-only. A reserved/read-only partition - ID is forbidden in certain documented contexts. + - May be ``""``. + - Must be valid UTF-8 bytes. + - Must have values that match regex ``[A-Za-z\d\.\-_]{1,100}`` If + the value of any dimension matches regex ``__.*__``, the partition + is reserved/read-only. A reserved/read-only partition ID is + forbidden in certain documented contexts. Foreign partition IDs (in which the project ID does not match the context project ID ) are discouraged. 
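[Editorial aside, not part of the patch] The partition-dimension rules restated above translate into a simple client-side check; this is an illustrative sketch only (the helper name is invented and the server remains authoritative):

    import re

    _DIMENSION = re.compile(r"[A-Za-z\d\.\-_]{1,100}")  # regex from the docstring
    _RESERVED = re.compile(r"__.*__")                    # reserved/read-only form

    def check_dimension(value: str) -> None:
        # Hypothetical validator; an empty dimension is explicitly allowed.
        if value == "":
            return
        if not _DIMENSION.fullmatch(value):
            raise ValueError(f"invalid partition dimension: {value!r}")
        if _RESERVED.fullmatch(value):
            raise ValueError(f"reserved/read-only partition dimension: {value!r}")

    check_dimension("my-namespace")  # passes
    check_dimension("")              # passes: empty is allowed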
Reads and writes of foreign diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py index 7f2da72b47f3..69c19aec5dd4 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/types/query.py @@ -164,9 +164,9 @@ class Query(proto.Message): Requires: - - If ``order`` is specified, the set of distinct on - properties must appear before the non-distinct on - properties in ``order``. + - If ``order`` is specified, the set of distinct on + properties must appear before the non-distinct on + properties in ``order``. start_cursor (bytes): A starting point for the query results. Query cursors are returned in query result batches and `can only be used to @@ -262,8 +262,8 @@ class AggregationQuery(proto.Message): Requires: - - A minimum of one and maximum of five aggregations per - query. + - A minimum of one and maximum of five aggregations per + query. """ class Aggregation(proto.Message): @@ -323,9 +323,9 @@ class Aggregation(proto.Message): Requires: - - Must be unique across all aggregation aliases. - - Conform to [entity property - name][google.datastore.v1.Entity.properties] limitations. + - Must be unique across all aggregation aliases. + - Conform to [entity property + name][google.datastore.v1.Entity.properties] limitations. """ class Count(proto.Message): @@ -355,7 +355,7 @@ class Count(proto.Message): Requires: - - Must be non-negative when present. + - Must be non-negative when present. """ up_to: wrappers_pb2.Int64Value = proto.Field( @@ -367,26 +367,26 @@ class Count(proto.Message): class Sum(proto.Message): r"""Sum of the values of the requested property. - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. + - Only numeric values will be aggregated. All non-numeric values + including ``NULL`` are skipped. - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. + - If the aggregated values contain ``NaN``, returns ``NaN``. + Infinity math follows IEEE-754 standards. - - If the aggregated value set is empty, returns 0. + - If the aggregated value set is empty, returns 0. - - Returns a 64-bit integer if all aggregated numbers are integers - and the sum result does not overflow. Otherwise, the result is - returned as a double. Note that even if all the aggregated values - are integers, the result is returned as a double if it cannot fit - within a 64-bit signed integer. When this occurs, the returned - value will lose precision. + - Returns a 64-bit integer if all aggregated numbers are integers + and the sum result does not overflow. Otherwise, the result is + returned as a double. Note that even if all the aggregated values + are integers, the result is returned as a double if it cannot fit + within a 64-bit signed integer. When this occurs, the returned + value will lose precision. - - When underflow occurs, floating-point aggregation is - non-deterministic. This means that running the same query - repeatedly without any changes to the underlying values could - produce slightly different results each time. In those cases, - values should be stored as integers over floating-point numbers. + - When underflow occurs, floating-point aggregation is + non-deterministic. 
This means that running the same query + repeatedly without any changes to the underlying values could + produce slightly different results each time. In those cases, + values should be stored as integers over floating-point numbers. Attributes: property (google.cloud.datastore_v1.types.PropertyReference): @@ -402,15 +402,15 @@ class Sum(proto.Message): class Avg(proto.Message): r"""Average of the values of the requested property. - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. + - Only numeric values will be aggregated. All non-numeric values + including ``NULL`` are skipped. - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. + - If the aggregated values contain ``NaN``, returns ``NaN``. + Infinity math follows IEEE-754 standards. - - If the aggregated value set is empty, returns ``NULL``. + - If the aggregated value set is empty, returns ``NULL``. - - Always returns the result as a double. + - Always returns the result as a double. Attributes: property (google.cloud.datastore_v1.types.PropertyReference): @@ -482,9 +482,9 @@ class PropertyReference(proto.Message): Requires: - - MUST be a dot-delimited (``.``) string of segments, where - each segment conforms to [entity property - name][google.datastore.v1.Entity.properties] limitations. + - MUST be a dot-delimited (``.``) string of segments, where + each segment conforms to [entity property + name][google.datastore.v1.Entity.properties] limitations. """ name: str = proto.Field( @@ -592,7 +592,7 @@ class CompositeFilter(proto.Message): Requires: - - At least one filter is present. + - At least one filter is present. """ class Operator(proto.Enum): @@ -647,27 +647,27 @@ class Operator(proto.Enum): Requires: - - That ``property`` comes first in ``order_by``. + - That ``property`` comes first in ``order_by``. LESS_THAN_OR_EQUAL (2): The given ``property`` is less than or equal to the given ``value``. Requires: - - That ``property`` comes first in ``order_by``. + - That ``property`` comes first in ``order_by``. GREATER_THAN (3): The given ``property`` is greater than the given ``value``. Requires: - - That ``property`` comes first in ``order_by``. + - That ``property`` comes first in ``order_by``. GREATER_THAN_OR_EQUAL (4): The given ``property`` is greater than or equal to the given ``value``. Requires: - - That ``property`` comes first in ``order_by``. + - That ``property`` comes first in ``order_by``. EQUAL (5): The given ``property`` is equal to the given ``value``. IN (6): @@ -676,36 +676,35 @@ class Operator(proto.Enum): Requires: - - That ``value`` is a non-empty ``ArrayValue``, subject to - disjunction limits. - - No ``NOT_IN`` is in the same query. + - That ``value`` is a non-empty ``ArrayValue``, subject to + disjunction limits. + - No ``NOT_IN`` is in the same query. NOT_EQUAL (9): The given ``property`` is not equal to the given ``value``. Requires: - - No other ``NOT_EQUAL`` or ``NOT_IN`` is in the same - query. - - That ``property`` comes first in the ``order_by``. + - No other ``NOT_EQUAL`` or ``NOT_IN`` is in the same query. + - That ``property`` comes first in the ``order_by``. HAS_ANCESTOR (11): Limit the result set to the given entity and its descendants. Requires: - - That ``value`` is an entity key. - - All evaluated disjunctions must have the same - ``HAS_ANCESTOR`` filter. + - That ``value`` is an entity key. + - All evaluated disjunctions must have the same + ``HAS_ANCESTOR`` filter. 
NOT_IN (13): The value of the ``property`` is not in the given array. Requires: - - That ``value`` is a non-empty ``ArrayValue`` with at most - 10 values. - - No other ``OR``, ``IN``, ``NOT_IN``, ``NOT_EQUAL`` is in - the same query. - - That ``field`` comes first in the ``order_by``. + - That ``value`` is a non-empty ``ArrayValue`` with at most + 10 values. + - No other ``OR``, ``IN``, ``NOT_IN``, ``NOT_EQUAL`` is in + the same query. + - That ``field`` comes first in the ``order_by``. """ OPERATOR_UNSPECIFIED = 0 LESS_THAN = 1 From 48cfa3b153539f77f594ff4bd8e1fe531da75de7 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 13 Nov 2025 07:56:02 -0800 Subject: [PATCH 609/611] chore(librarian): onboard to librarian (#646) This PR onboards `google-cloud-datastore` to the librarian system. Towards https://github.com/googleapis/librarian/issues/2458 --------- Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 16 - .../.github/.OwlBot.yaml | 28 - .../.github/auto-approve.yml | 3 - .../.github/release-please.yml | 12 - .../.github/release-trigger.yml | 2 - .../.github/sync-repo-settings.yaml | 49 -- .../generator-input/.repo-metadata.json | 17 + .../.librarian/generator-input/noxfile.py | 535 ++++++++++++++++++ .../.librarian/generator-input/owlbot.py | 65 +++ .../.librarian/generator-input/setup.py | 106 ++++ .../.librarian/state.yaml | 36 ++ .../google-cloud-datastore/docs/README.rst | 202 ++++++- .../cloud/datastore_admin/gapic_version.py | 2 +- .../cloud/datastore_admin_v1/gapic_version.py | 5 +- .../cloud/datastore_v1/gapic_version.py | 5 +- packages/google-cloud-datastore/owlbot.py | 52 +- 16 files changed, 973 insertions(+), 162 deletions(-) delete mode 100644 packages/google-cloud-datastore/.github/.OwlBot.lock.yaml delete mode 100644 packages/google-cloud-datastore/.github/.OwlBot.yaml delete mode 100644 packages/google-cloud-datastore/.github/auto-approve.yml delete mode 100644 packages/google-cloud-datastore/.github/release-please.yml delete mode 100644 packages/google-cloud-datastore/.github/release-trigger.yml delete mode 100644 packages/google-cloud-datastore/.github/sync-repo-settings.yaml create mode 100644 packages/google-cloud-datastore/.librarian/generator-input/.repo-metadata.json create mode 100644 packages/google-cloud-datastore/.librarian/generator-input/noxfile.py create mode 100644 packages/google-cloud-datastore/.librarian/generator-input/owlbot.py create mode 100644 packages/google-cloud-datastore/.librarian/generator-input/setup.py create mode 100644 packages/google-cloud-datastore/.librarian/state.yaml mode change 120000 => 100644 packages/google-cloud-datastore/docs/README.rst diff --git a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml b/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml deleted file mode 100644 index c58733d0c826..000000000000 --- a/packages/google-cloud-datastore/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fbbc8db67afd8b7d71bf694c5081a32da0c528eba166fbcffb3b6e56ddf907d5 diff --git a/packages/google-cloud-datastore/.github/.OwlBot.yaml b/packages/google-cloud-datastore/.github/.OwlBot.yaml deleted file mode 100644 index 41eaf2b934f9..000000000000 --- a/packages/google-cloud-datastore/.github/.OwlBot.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/datastore/(v.*)/.*-py/(.*) - dest: /owl-bot-staging/datastore/$1/$2 - - source: /google/datastore/admin/(v.*)/.*-py/(.*) - dest: /owl-bot-staging/datastore_admin/$1/$2 - -begin-after-commit-hash: de97bb0aeade880aba2cd71a55c06dbc4cd2b337 - diff --git a/packages/google-cloud-datastore/.github/auto-approve.yml b/packages/google-cloud-datastore/.github/auto-approve.yml deleted file mode 100644 index 311ebbb853a9..000000000000 --- a/packages/google-cloud-datastore/.github/auto-approve.yml +++ /dev/null @@ -1,3 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve -processes: - - "OwlBotTemplateChanges" diff --git a/packages/google-cloud-datastore/.github/release-please.yml b/packages/google-cloud-datastore/.github/release-please.yml deleted file mode 100644 index fe749ff6b15d..000000000000 --- a/packages/google-cloud-datastore/.github/release-please.yml +++ /dev/null @@ -1,12 +0,0 @@ -releaseType: python -handleGHRelease: true -manifest: true -# NOTE: this section is generated by synthtool.languages.python -# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py -branches: -- branch: v1 - handleGHRelease: true - releaseType: python -- branch: v0 - handleGHRelease: true - releaseType: python diff --git a/packages/google-cloud-datastore/.github/release-trigger.yml b/packages/google-cloud-datastore/.github/release-trigger.yml deleted file mode 100644 index 932fbdd05682..000000000000 --- a/packages/google-cloud-datastore/.github/release-trigger.yml +++ /dev/null @@ -1,2 +0,0 @@ -enabled: true -multiScmName: python-datastore diff --git a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml b/packages/google-cloud-datastore/.github/sync-repo-settings.yaml deleted file mode 100644 index 137807f26de7..000000000000 --- a/packages/google-cloud-datastore/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Rules for main branch protection -branchProtectionRules: -# Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `main` -- pattern: main - # Can admins overwrite branch protection. - # Defaults to `true` - isAdminEnforced: true - # Number of approving reviews required to update matching branches. - # Defaults to `1` - requiredApprovingReviewCount: 1 - # Are reviews from code owners required to update matching branches. 
- # Defaults to `false` - requiresCodeOwnerReviews: true - # Require up to date branches - requiresStrictStatusChecks: true - # List of required status check contexts that must pass for commits to be accepted to matching branches. - requiredStatusCheckContexts: - - 'Kokoro' - - 'Kokoro system-3.8' - - 'cla/google' - - 'OwlBot Post Processor' - - 'docs' - - 'docfx' - - 'lint' - - 'unit (3.7)' - - 'unit (3.8)' - - 'unit (3.9)' - - 'unit (3.10)' - - 'unit (3.11)' - - 'unit (3.12)' - - 'unit (3.13)' - - 'unit (3.14)' - - 'cover' - - 'mypy' -# List of explicit permissions to add (additive only) -permissionRules: - # Team slug to add to repository permissions - - team: yoshi-admins - # Access level required, one of push|pull|admin|maintain|triage - permission: admin - # Team slug to add to repository permissions - - team: yoshi-python-admins - # Access level required, one of push|pull|admin|maintain|triage - permission: admin - # Team slug to add to repository permissions - - team: yoshi-python - # Access level required, one of push|pull|admin|maintain|triage - permission: push diff --git a/packages/google-cloud-datastore/.librarian/generator-input/.repo-metadata.json b/packages/google-cloud-datastore/.librarian/generator-input/.repo-metadata.json new file mode 100644 index 000000000000..d7b18d4a99cf --- /dev/null +++ b/packages/google-cloud-datastore/.librarian/generator-input/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "datastore", + "name_pretty": "Google Cloud Datastore", + "product_documentation": "https://cloud.google.com/datastore", + "client_documentation": "https://cloud.google.com/python/docs/reference/datastore/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_COMBO", + "repo": "googleapis/python-datastore", + "distribution_name": "google-cloud-datastore", + "api_id": "datastore.googleapis.com", + "default_version": "v1", + "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners", + "api_shortname": "datastore", + "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries." +} diff --git a/packages/google-cloud-datastore/.librarian/generator-input/noxfile.py b/packages/google-cloud-datastore/.librarian/generator-input/noxfile.py new file mode 100644 index 000000000000..d8151522b7c1 --- /dev/null +++ b/packages/google-cloud-datastore/.librarian/generator-input/noxfile.py @@ -0,0 +1,535 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.14" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "six", +] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "six", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +nox.options.sessions = [ + "unit", + "system", + "mypy", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", + "doctests", + "docfx", + "format", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Verify type hints are mypy compatible.""" + session.install("-e", ".") + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 + session.install( + "mypy", + "types-setuptools", + "types-mock", + "types-protobuf!=4.24.0.20240106", + "types-requests", + ) + session.run("mypy", "-p", "google.cloud.datastore") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments", "setuptools") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +@nox.parametrize("disable_grpc", [False, True]) +def system(session, disable_grpc): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + env = {} + if disable_grpc: + env["GOOGLE_CLOUD_DISABLE_GRPC"] = "True" + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + env=env, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + env=env, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+        "sphinxcontrib-applehelp==1.0.4",
+        "sphinxcontrib-devhelp==1.0.2",
+        "sphinxcontrib-htmlhelp==2.0.1",
+        "sphinxcontrib-qthelp==1.0.3",
+        "sphinxcontrib-serializinghtml==1.1.5",
+        "sphinx==4.5.0",
+        "alabaster",
+        "recommonmark",
+    )
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-W",  # warnings as errors
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
+
+
+@nox.session(python="3.9")
+def doctests(session):
+    # Install all test dependencies, then install this package into the
+    # virtualenv's dist-packages.
+    session.install("mock", "pytest", "sphinx", "google-cloud-testutils")
+    session.install("-e", ".")
+
+    # Run py.test against the doctests.
+    session.run("py.test", "tests/doctests.py")
+
+
+@nox.session(python="3.10")
+def docfx(session):
+    """Build the docfx yaml files for this library."""
+
+    session.install("-e", ".")
+    session.install(
+        # We need to pin to specific versions of the `sphinxcontrib-*` packages
+        # which still support sphinx 4.x.
+        # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+        # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+        "sphinxcontrib-applehelp==1.0.4",
+        "sphinxcontrib-devhelp==1.0.2",
+        "sphinxcontrib-htmlhelp==2.0.1",
+        "sphinxcontrib-qthelp==1.0.3",
+        "sphinxcontrib-serializinghtml==1.1.5",
+        "gcp-sphinx-docfx-yaml",
+        "alabaster",
+        "recommonmark",
+    )
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-D",
+        (
+            "extensions=sphinx.ext.autodoc,"
+            "sphinx.ext.autosummary,"
+            "docfx_yaml.extension,"
+            "sphinx.ext.intersphinx,"
+            "sphinx.ext.coverage,"
+            "sphinx.ext.napoleon,"
+            "sphinx.ext.todo,"
+            "sphinx.ext.viewcode,"
+            "recommonmark"
+        ),
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
+
+
+@nox.session(python="3.14")
+@nox.parametrize(
+    "protobuf_implementation",
+    ["python", "upb", "cpp"],
+)
+def prerelease_deps(session, protobuf_implementation):
+    """Run all tests with prerelease versions of dependencies installed."""
+
+    if protobuf_implementation == "cpp" and session.python in (
+        "3.11",
+        "3.12",
+        "3.13",
+        "3.14",
+    ):
+        session.skip("cpp implementation is not supported in python 3.11+")
+
+    # Install all dependencies
+    session.install("-e", ".[all, tests, tracing]")
+    unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
+    session.install(*unit_deps_all)
+    system_deps_all = (
+        SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
+    )
+    session.install(*system_deps_all)
+
+    # Because we test minimum dependency versions on the minimum Python
+    # version, the first version we test with in the unit tests sessions has a
+    # constraints file containing all dependencies and extras.
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
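+    # The regex keeps only `name==version` pins from the constraints file:
+    # e.g. a hypothetical line `some-package==1.2.3` yields `some-package`,
+    # while commented-out or unpinned requirements produce no match.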
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    session.install(*constraints_deps)
+
+    prerel_deps = [
+        "protobuf",
+        # dependency of grpc
+        "six",
+        "grpc-google-iam-v1",
+        "googleapis-common-protos",
+        "grpcio",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    # --no-deps upgrades each listed package to its pre-release without
+    # pulling in (or downgrading) the rest of its dependency tree.
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run(
+        "py.test",
+        "tests/unit",
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+            env={
+                "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+            },
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+            env={
+                "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+            },
+        )
diff --git a/packages/google-cloud-datastore/.librarian/generator-input/owlbot.py b/packages/google-cloud-datastore/.librarian/generator-input/owlbot.py
new file mode 100644
index 000000000000..fe1e88c4bcdd
--- /dev/null
+++ b/packages/google-cloud-datastore/.librarian/generator-input/owlbot.py
@@ -0,0 +1,65 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +"""This script is used to synthesize generated parts of this library.""" +from pathlib import Path +from typing import List, Optional + +import synthtool as s +from synthtool import gcp +from synthtool.languages import python + +common = gcp.CommonTemplates() + +# This library ships clients for two different APIs, +# Datastore and Datastore Admin +datastore_default_version = "v1" +datastore_admin_default_version = "v1" + +for library in s.get_staging_dirs(datastore_default_version): + s.move(library / f"google/cloud/datastore_{library.name}") + s.move(library / "tests/") + s.move(library / "scripts") + +for library in s.get_staging_dirs(datastore_admin_default_version): + + s.move(library / f"google/cloud/datastore_admin") + s.move(library / f"google/cloud/datastore_admin_{library.name}") + s.move(library / "tests") + s.move(library / "scripts") + +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library( + microgenerator=True, + split_system_tests=True, + # six required by (but not installed by) google-cloud-core < v2.0.0 + unit_test_external_dependencies=["six"], + system_test_external_dependencies=["six"], + cov_level=100, + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + default_python_version="3.14", + system_test_python_versions=["3.14"], +) +s.move( + templated_files, + excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/**", ".kokoro/**"], +) + +python.py_samples(skip_readmes=True) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-datastore/.librarian/generator-input/setup.py b/packages/google-cloud-datastore/.librarian/generator-input/setup.py new file mode 100644 index 000000000000..3d0917ee7ade --- /dev/null +++ b/packages/google-cloud-datastore/.librarian/generator-input/setup.py @@ -0,0 +1,106 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os + +import setuptools + + +# Package metadata. 
+ +name = "google-cloud-datastore" +description = "Google Cloud Datastore API client library" + +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Production/Stable' +release_status = "Development Status :: 5 - Production/Stable" +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-core >= 1.4.0, <3.0.0", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpcio >= 1.38.0, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", +] +extras = {"libcst": "libcst >= 0.2.5"} + + +# Setup boilerplate below this line. + +package_root = os.path.abspath(os.path.dirname(__file__)) + +version = {} +with open(os.path.join(package_root, "google/cloud/datastore/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/googleapis/python-datastore", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + "Topic :: Software Development :: Libraries :: Python Modules", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + install_requires=dependencies, + extras_require=extras, + python_requires=">=3.7", + scripts=[ + "scripts/fixup_datastore_v1_keywords.py", + "scripts/fixup_datastore_admin_v1_keywords.py", + ], + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-datastore/.librarian/state.yaml b/packages/google-cloud-datastore/.librarian/state.yaml new file mode 100644 index 000000000000..77454d38b539 --- /dev/null +++ b/packages/google-cloud-datastore/.librarian/state.yaml @@ -0,0 +1,36 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:ce48ed695c727f7e13efd1fd68f466a55a0d772c87b69158720cec39965bc8b2 +libraries: + - id: google-cloud-datastore + version: 2.21.0 + last_generated_commit: 659ea6e98acc7d58661ce2aa7b4cf76a7ef3fd42 + apis: + - path: google/datastore/v1 + 
service_config: datastore_v1.yaml
+      - path: google/datastore/admin/v1
+        service_config: datastore_v1.yaml
+    source_roots:
+      - .
+    preserve_regex: []
+    remove_regex:
+      - ^google/cloud/datastore_v1
+      - ^google/cloud/datastore_admin
+      - ^google/cloud/datastore_admin_v1
+      - ^tests/unit/gapic
+      - ^tests/__init__.py
+      - ^tests/unit/__init__.py
+      - ^.pre-commit-config.yaml
+      - ^.repo-metadata.json
+      - ^.trampolinerc
+      - ^SECURITY.md
+      - ^noxfile.py
+      - ^owlbot.py
+      - ^renovate.json
+      - ^samples/AUTHORING_GUIDE.md
+      - ^samples/CONTRIBUTING.md
+      - ^samples/generated_samples
+      - ^scripts/fixup_
+      - ^setup.py
+      - ^README.rst
+      - ^docs/README.rst
+      - ^docs/summary_overview.md
+    tag_format: v{version}
diff --git a/packages/google-cloud-datastore/docs/README.rst b/packages/google-cloud-datastore/docs/README.rst
deleted file mode 120000
index 89a0106941ff..000000000000
--- a/packages/google-cloud-datastore/docs/README.rst
+++ /dev/null
@@ -1 +0,0 @@
-../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-datastore/docs/README.rst b/packages/google-cloud-datastore/docs/README.rst
new file mode 100644
index 000000000000..1ec7e45c1c6e
--- /dev/null
+++ b/packages/google-cloud-datastore/docs/README.rst
@@ -0,0 +1,201 @@
+Python Client for Google Cloud Datastore API
+============================================
+
+|stable| |pypi| |versions|
+
+`Google Cloud Datastore API`_ is a fully managed, schemaless database for
+storing non-relational data. Cloud Datastore automatically scales with
+your users and supports ACID transactions, high availability of reads and
+writes, strong consistency for reads and ancestor queries, and eventual
+consistency for all other queries.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg
+   :target: https://pypi.org/project/google-cloud-datastore/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg
+   :target: https://pypi.org/project/google-cloud-datastore/
+.. _Google Cloud Datastore API: https://cloud.google.com/datastore
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest/summary_overview
+.. _Product Documentation: https://cloud.google.com/datastore
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Datastore API.`_
+4. `Set up Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Datastore API.: https://cloud.google.com/datastore
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/python-datastore/tree/main/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7, including 3.14
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-datastore
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-datastore
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Google Cloud Datastore API
+  to see other available methods on the client.
+- Read the `Google Cloud Datastore API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Google Cloud Datastore API Product documentation: https://cloud.google.com/datastore
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured or the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud import library_v1
+
+   base_logger = logging.getLogger("google")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud import library_v1
+
+   base_logger = logging.getLogger("google.cloud.library_v1")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py
index e546bae0531e..b5f2eaf6ced2 100644
--- a/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py
+++ b/packages/google-cloud-datastore/google/cloud/datastore_admin/gapic_version.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py index 6e29ec5f3feb..b5f2eaf6ced2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_admin_v1/gapic_version.py @@ -1,4 +1,5 @@ -# Copyright 2022 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,5 +12,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# __version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py index 6e29ec5f3feb..b5f2eaf6ced2 100644 --- a/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py +++ b/packages/google-cloud-datastore/google/cloud/datastore_v1/gapic_version.py @@ -1,4 +1,5 @@ -# Copyright 2022 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,5 +12,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# __version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py index 4638b928ef0a..fe1e88c4bcdd 100644 --- a/packages/google-cloud-datastore/owlbot.py +++ b/packages/google-cloud-datastore/owlbot.py @@ -22,60 +22,20 @@ common = gcp.CommonTemplates() - -# This is a customized version of the s.get_staging_dirs() function from synthtool to -# cater for copying 2 different folders from googleapis-gen -# which are datastore and datastore/admin -# Source https://github.com/googleapis/synthtool/blob/master/synthtool/transforms.py#L280 -def get_staging_dirs( - default_version: Optional[str] = None, sub_directory: Optional[str] = None -) -> List[Path]: - """Returns the list of directories, one per version, copied from - https://github.com/googleapis/googleapis-gen. Will return in lexical sorting - order with the exception of the default_version which will be last (if specified). - Args: - default_version (str): the default version of the API. The directory for this version - will be the last item in the returned list if specified. - sub_directory (str): if a `sub_directory` is provided, only the directories within the - specified `sub_directory` will be returned. - Returns: the empty list if no file were copied. - """ - - staging = Path("owl-bot-staging") - - if sub_directory: - staging /= sub_directory - - if staging.is_dir(): - # Collect the subdirectories of the staging directory. - versions = [v.name for v in staging.iterdir() if v.is_dir()] - # Reorder the versions so the default version always comes last. 
- versions = [v for v in versions if v != default_version] - versions.sort() - if default_version is not None: - versions += [default_version] - dirs = [staging / v for v in versions] - for dir in dirs: - s._tracked_paths.add(dir) - return dirs - else: - return [] - - # This library ships clients for two different APIs, # Datastore and Datastore Admin datastore_default_version = "v1" datastore_admin_default_version = "v1" -for library in get_staging_dirs(datastore_default_version, "datastore"): - s.move(library / f"google/cloud/datastore_{library.name}", excludes=["**/gapic_version.py"]) +for library in s.get_staging_dirs(datastore_default_version): + s.move(library / f"google/cloud/datastore_{library.name}") s.move(library / "tests/") s.move(library / "scripts") -for library in get_staging_dirs(datastore_admin_default_version, "datastore_admin"): +for library in s.get_staging_dirs(datastore_admin_default_version): - s.move(library / f"google/cloud/datastore_admin", excludes=["**/gapic_version.py"]) - s.move(library / f"google/cloud/datastore_admin_{library.name}", excludes=["**/gapic_version.py"]) + s.move(library / f"google/cloud/datastore_admin") + s.move(library / f"google/cloud/datastore_admin_{library.name}") s.move(library / "tests") s.move(library / "scripts") @@ -97,7 +57,7 @@ def get_staging_dirs( ) s.move( templated_files, - excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/CODEOOWNERS", ".github/release-please.yml", ".kokoro/presubmit/system-3.8.cfg", ".kokoro/presubmit/presubmit.cfg"], + excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/**", ".kokoro/**"], ) python.py_samples(skip_readmes=True) From f3d89f078e878934e2dc8e215ac094221f9534f1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Nov 2025 22:37:24 +0000 Subject: [PATCH 610/611] Trigger owlbot post-processor --- .../google-cloud-datastore/google-cloud-datastore.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 owl-bot-staging/google-cloud-datastore/google-cloud-datastore/google-cloud-datastore.txt diff --git a/owl-bot-staging/google-cloud-datastore/google-cloud-datastore/google-cloud-datastore.txt b/owl-bot-staging/google-cloud-datastore/google-cloud-datastore/google-cloud-datastore.txt new file mode 100644 index 000000000000..e69de29bb2d1 From 3bcd9397ed071a164a943edab1385a564465968f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Nov 2025 22:37:33 +0000 Subject: [PATCH 611/611] build: google-cloud-datastore migration: adjust owlbot-related files --- .../google-cloud-datastore/.github/CODEOWNERS | 12 - .../.github/CONTRIBUTING.md | 28 - .../.github/ISSUE_TEMPLATE/bug_report.md | 43 -- .../.github/ISSUE_TEMPLATE/feature_request.md | 18 - .../.github/ISSUE_TEMPLATE/support_request.md | 7 - .../.github/PULL_REQUEST_TEMPLATE.md | 7 - .../.github/auto-label.yaml | 20 - .../.github/blunderbuss.yml | 23 - .../.github/flakybot.yaml | 1 - .../.github/header-checker-lint.yml | 15 - .../.github/snippet-bot.yml | 0 .../.github/workflows/docs.yml | 38 -- .../.github/workflows/lint.yml | 25 - .../.github/workflows/mypy.yml | 22 - .../.github/workflows/unittest.yml | 61 --- .../google-cloud-datastore/.kokoro/build.sh | 60 --- .../.kokoro/continuous/common.cfg | 27 - .../.kokoro/continuous/continuous.cfg | 1 - .../.kokoro/continuous/prerelease-deps.cfg | 7 - .../.kokoro/populate-secrets.sh | 43 -- .../.kokoro/presubmit/common.cfg | 27 - .../.kokoro/presubmit/prerelease-deps.cfg | 7 - .../.kokoro/presubmit/presubmit.cfg | 12 - 
.../.kokoro/presubmit/system-3.12.cfg | 7 - .../.kokoro/presubmit/system-3.14.cfg | 7 - .../.kokoro/samples/lint/common.cfg | 34 -- .../.kokoro/samples/lint/continuous.cfg | 6 - .../.kokoro/samples/lint/periodic.cfg | 6 - .../.kokoro/samples/lint/presubmit.cfg | 6 - .../.kokoro/samples/python3.10/common.cfg | 40 -- .../.kokoro/samples/python3.10/continuous.cfg | 6 - .../samples/python3.10/periodic-head.cfg | 11 - .../.kokoro/samples/python3.10/periodic.cfg | 6 - .../.kokoro/samples/python3.10/presubmit.cfg | 6 - .../.kokoro/samples/python3.11/common.cfg | 40 -- .../.kokoro/samples/python3.11/continuous.cfg | 6 - .../samples/python3.11/periodic-head.cfg | 11 - .../.kokoro/samples/python3.11/periodic.cfg | 6 - .../.kokoro/samples/python3.11/presubmit.cfg | 6 - .../.kokoro/samples/python3.12/common.cfg | 40 -- .../.kokoro/samples/python3.12/continuous.cfg | 6 - .../samples/python3.12/periodic-head.cfg | 11 - .../.kokoro/samples/python3.12/periodic.cfg | 6 - .../.kokoro/samples/python3.12/presubmit.cfg | 6 - .../.kokoro/samples/python3.13/common.cfg | 40 -- .../.kokoro/samples/python3.13/continuous.cfg | 6 - .../samples/python3.13/periodic-head.cfg | 11 - .../.kokoro/samples/python3.13/periodic.cfg | 6 - .../.kokoro/samples/python3.13/presubmit.cfg | 6 - .../.kokoro/samples/python3.14/common.cfg | 40 -- .../.kokoro/samples/python3.14/continuous.cfg | 6 - .../samples/python3.14/periodic-head.cfg | 11 - .../.kokoro/samples/python3.14/periodic.cfg | 6 - .../.kokoro/samples/python3.14/presubmit.cfg | 6 - .../.kokoro/samples/python3.7/common.cfg | 40 -- .../.kokoro/samples/python3.7/continuous.cfg | 6 - .../samples/python3.7/periodic-head.cfg | 11 - .../.kokoro/samples/python3.7/periodic.cfg | 6 - .../.kokoro/samples/python3.7/presubmit.cfg | 6 - .../.kokoro/samples/python3.8/common.cfg | 40 -- .../.kokoro/samples/python3.8/continuous.cfg | 6 - .../samples/python3.8/periodic-head.cfg | 11 - .../.kokoro/samples/python3.8/periodic.cfg | 6 - .../.kokoro/samples/python3.8/presubmit.cfg | 6 - .../.kokoro/samples/python3.9/common.cfg | 40 -- .../.kokoro/samples/python3.9/continuous.cfg | 6 - .../samples/python3.9/periodic-head.cfg | 11 - .../.kokoro/samples/python3.9/periodic.cfg | 6 - .../.kokoro/samples/python3.9/presubmit.cfg | 6 - .../.kokoro/test-samples-against-head.sh | 26 - .../.kokoro/test-samples-impl.sh | 103 ---- .../.kokoro/test-samples.sh | 44 -- .../.kokoro/trampoline.sh | 28 - .../.kokoro/trampoline_v2.sh | 487 ------------------ packages/google-cloud-datastore/.trampolinerc | 61 --- .../google-cloud-datastore/docs/changelog.md | 1 - packages/google-cloud-datastore/owlbot.py | 65 --- 77 files changed, 1937 deletions(-) delete mode 100644 packages/google-cloud-datastore/.github/CODEOWNERS delete mode 100644 packages/google-cloud-datastore/.github/CONTRIBUTING.md delete mode 100644 packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 packages/google-cloud-datastore/.github/auto-label.yaml delete mode 100644 packages/google-cloud-datastore/.github/blunderbuss.yml delete mode 100644 packages/google-cloud-datastore/.github/flakybot.yaml delete mode 100644 packages/google-cloud-datastore/.github/header-checker-lint.yml delete mode 100644 packages/google-cloud-datastore/.github/snippet-bot.yml 
delete mode 100644 packages/google-cloud-datastore/.github/workflows/docs.yml delete mode 100644 packages/google-cloud-datastore/.github/workflows/lint.yml delete mode 100644 packages/google-cloud-datastore/.github/workflows/mypy.yml delete mode 100644 packages/google-cloud-datastore/.github/workflows/unittest.yml delete mode 100755 packages/google-cloud-datastore/.kokoro/build.sh delete mode 100644 packages/google-cloud-datastore/.kokoro/continuous/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg delete mode 100755 packages/google-cloud-datastore/.kokoro/populate-secrets.sh delete mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg delete mode 100644 
packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg delete mode 100644 packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg delete mode 100755 packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh delete mode 100755 packages/google-cloud-datastore/.kokoro/test-samples-impl.sh delete mode 100755 packages/google-cloud-datastore/.kokoro/test-samples.sh delete mode 100755 packages/google-cloud-datastore/.kokoro/trampoline.sh delete mode 100755 packages/google-cloud-datastore/.kokoro/trampoline_v2.sh delete mode 100644 packages/google-cloud-datastore/.trampolinerc delete mode 120000 packages/google-cloud-datastore/docs/changelog.md delete mode 100644 packages/google-cloud-datastore/owlbot.py diff --git a/packages/google-cloud-datastore/.github/CODEOWNERS b/packages/google-cloud-datastore/.github/CODEOWNERS deleted file mode 100644 index 6475082470f2..000000000000 --- a/packages/google-cloud-datastore/.github/CODEOWNERS +++ /dev/null @@ -1,12 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-
-# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners are the default owners for changes in this repo
-*   @googleapis/yoshi-python @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners
-
-# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners are the default owners for samples changes
-/samples/   @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners
diff --git a/packages/google-cloud-datastore/.github/CONTRIBUTING.md b/packages/google-cloud-datastore/.github/CONTRIBUTING.md
deleted file mode 100644
index 939e5341e74d..000000000000
--- a/packages/google-cloud-datastore/.github/CONTRIBUTING.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# How to Contribute
-
-We'd love to accept your patches and contributions to this project. There are
-just a few small guidelines you need to follow.
-
-## Contributor License Agreement
-
-Contributions to this project must be accompanied by a Contributor License
-Agreement. You (or your employer) retain the copyright to your contribution;
-this simply gives us permission to use and redistribute your contributions as
-part of the project. Head over to <https://cla.developers.google.com/> to see
-your current agreements on file or to sign a new one.
-
-You generally only need to submit a CLA once, so if you've already submitted one
-(even if it was for a different project), you probably don't need to do it
-again.
-
-## Code reviews
-
-All submissions, including submissions by project members, require review. We
-use GitHub pull requests for this purpose. Consult
-[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
-information on using pull requests.
-
-## Community Guidelines
-
-This project follows [Google's Open Source Community
-Guidelines](https://opensource.google.com/conduct/).
diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 02714dfe78e5..000000000000
--- a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,43 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-
----
-
-Thanks for stopping by to let us know something could be better!
-
-**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
-
-Please run down the following list and make sure you've tried the usual "quick fixes":
-
-  - Search the issues already opened: https://github.com/googleapis/python-datastore/issues
-  - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python
-
-If you are still having issues, please be sure to include as much information as possible:
-
-#### Environment details
-
-  - OS type and version:
-  - Python version: `python --version`
-  - pip version: `pip --version`
-  - `google-cloud-datastore` version: `pip show google-cloud-datastore`
-
-#### Steps to reproduce
-
-  1. ?
-  2. ?
-
-#### Code example
-
-```python
-# example
-```
-
-#### Stack trace
-```
-# example
-```
-
-Making sure to follow these steps will guarantee the quickest resolution possible.
-
-Thanks!
diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 6365857f33c6..000000000000
--- a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,18 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this library
-
----
-
-Thanks for stopping by to let us know something could be better!
-
-**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
-
- **Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
- **Describe the solution you'd like**
-A clear and concise description of what you want to happen.
- **Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
- **Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md
deleted file mode 100644
index 995869032125..000000000000
--- a/packages/google-cloud-datastore/.github/ISSUE_TEMPLATE/support_request.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-name: Support request
-about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
-
----
-
-**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md
deleted file mode 100644
index 91cb335a75e5..000000000000
--- a/packages/google-cloud-datastore/.github/PULL_REQUEST_TEMPLATE.md
+++ /dev/null
@@ -1,7 +0,0 @@
-Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
-- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-datastore/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
-- [ ] Ensure the tests and linter pass
-- [ ] Code coverage does not decrease (if any source code was changed)
-- [ ] Appropriate docs were updated (if necessary)
-
-Fixes #<issue_number_goes_here> 🦕
diff --git a/packages/google-cloud-datastore/.github/auto-label.yaml b/packages/google-cloud-datastore/.github/auto-label.yaml
deleted file mode 100644
index 21786a4eb085..000000000000
--- a/packages/google-cloud-datastore/.github/auto-label.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License. -requestsize: - enabled: true - -path: - pullrequest: true - paths: - samples: "samples" diff --git a/packages/google-cloud-datastore/.github/blunderbuss.yml b/packages/google-cloud-datastore/.github/blunderbuss.yml deleted file mode 100644 index 54156a1b55b2..000000000000 --- a/packages/google-cloud-datastore/.github/blunderbuss.yml +++ /dev/null @@ -1,23 +0,0 @@ -# Blunderbuss config -# -# This file controls who is assigned for pull requests and issues. -# Note: This file is autogenerated. To make changes to the assignee -# team, please update `codeowner_team` in `.repo-metadata.json`. -assign_issues: - - googleapis/cloud-native-db-dpes - - googleapis/api-datastore-sdk - - googleapis/api-firestore-partners - -assign_issues_by: - - labels: - - "samples" - to: - - googleapis/python-samples-reviewers - - googleapis/cloud-native-db-dpes - - googleapis/api-datastore-sdk - - googleapis/api-firestore-partners - -assign_prs: - - googleapis/cloud-native-db-dpes - - googleapis/api-datastore-sdk - - googleapis/api-firestore-partners diff --git a/packages/google-cloud-datastore/.github/flakybot.yaml b/packages/google-cloud-datastore/.github/flakybot.yaml deleted file mode 100644 index cb83375f9893..000000000000 --- a/packages/google-cloud-datastore/.github/flakybot.yaml +++ /dev/null @@ -1 +0,0 @@ -issuePriority: p2 diff --git a/packages/google-cloud-datastore/.github/header-checker-lint.yml b/packages/google-cloud-datastore/.github/header-checker-lint.yml deleted file mode 100644 index 6fe78aa7987a..000000000000 --- a/packages/google-cloud-datastore/.github/header-checker-lint.yml +++ /dev/null @@ -1,15 +0,0 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.github/snippet-bot.yml b/packages/google-cloud-datastore/.github/snippet-bot.yml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-datastore/.github/workflows/docs.yml b/packages/google-cloud-datastore/.github/workflows/docs.yml deleted file mode 100644 index 2833fe98fff0..000000000000 --- a/packages/google-cloud-datastore/.github/workflows/docs.yml +++ /dev/null @@ -1,38 +0,0 @@ -on: - pull_request: - branches: - - main -name: docs -jobs: - docs: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docs - run: | - nox -s docs - docfx: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docfx - run: | - nox -s docfx diff --git a/packages/google-cloud-datastore/.github/workflows/lint.yml b/packages/google-cloud-datastore/.github/workflows/lint.yml deleted file mode 100644 index 3ed755f0005c..000000000000 --- 
a/packages/google-cloud-datastore/.github/workflows/lint.yml +++ /dev/null @@ -1,25 +0,0 @@ -on: - pull_request: - branches: - - main -name: lint -jobs: - lint: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.14" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run lint - run: | - nox -s lint - - name: Run lint_setup_py - run: | - nox -s lint_setup_py diff --git a/packages/google-cloud-datastore/.github/workflows/mypy.yml b/packages/google-cloud-datastore/.github/workflows/mypy.yml deleted file mode 100644 index 8363e7218202..000000000000 --- a/packages/google-cloud-datastore/.github/workflows/mypy.yml +++ /dev/null @@ -1,22 +0,0 @@ -on: - pull_request: - branches: - - main -name: mypy -jobs: - mypy: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.14" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run mypy - run: | - nox -s mypy diff --git a/packages/google-cloud-datastore/.github/workflows/unittest.yml b/packages/google-cloud-datastore/.github/workflows/unittest.yml deleted file mode 100644 index cc6fe2b2fdd5..000000000000 --- a/packages/google-cloud-datastore/.github/workflows/unittest.yml +++ /dev/null @@ -1,61 +0,0 @@ -on: - pull_request: - branches: - - main -name: unittest -jobs: - unit: - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. - # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories - runs-on: ubuntu-22.04 - strategy: - matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run unit tests - env: - COVERAGE_FILE: .coverage-${{ matrix.python }} - run: | - nox -s unit-${{ matrix.python }} - - name: Upload coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-artifact-${{ matrix.python }} - path: .coverage-${{ matrix.python }} - include-hidden-files: true - - cover: - runs-on: ubuntu-latest - needs: - - unit - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.14" - - name: Install coverage - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install coverage - - name: Download coverage results - uses: actions/download-artifact@v4 - with: - path: .coverage-results/ - - name: Report coverage results - run: | - find .coverage-results -type f -name '*.zip' -exec unzip {} \; - coverage combine .coverage-results/**/.coverage* - coverage report --show-missing --fail-under=100 diff --git a/packages/google-cloud-datastore/.kokoro/build.sh b/packages/google-cloud-datastore/.kokoro/build.sh deleted file mode 100755 index d41b45aa1dd0..000000000000 --- a/packages/google-cloud-datastore/.kokoro/build.sh +++ /dev/null @@ 
-1,60 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") -fi - -pushd "${PROJECT_ROOT}" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] -then - export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -fi - -# Setup project id. -if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] -then - export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -fi - -# If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -# If NOX_SESSION is set, it only runs the specified session, -# otherwise run all the sessions. -if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} -else - python3 -m nox -fi diff --git a/packages/google-cloud-datastore/.kokoro/continuous/common.cfg b/packages/google-cloud-datastore/.kokoro/continuous/common.cfg deleted file mode 100644 index 74d61e90239f..000000000000 --- a/packages/google-cloud-datastore/.kokoro/continuous/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/build.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg b/packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-datastore/.kokoro/continuous/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg b/packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/google-cloud-datastore/.kokoro/continuous/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh b/packages/google-cloud-datastore/.kokoro/populate-secrets.sh deleted file mode 100755 index c435402f473e..000000000000 --- a/packages/google-cloud-datastore/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? == 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/common.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/common.cfg deleted file mode 100644 index 74d61e90239f..000000000000 --- a/packages/google-cloud-datastore/.kokoro/presubmit/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-datastore/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/build.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/google-cloud-datastore/.kokoro/presubmit/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg deleted file mode 100644 index 54593f363cf2..000000000000 --- a/packages/google-cloud-datastore/.kokoro/presubmit/presubmit.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Disable system tests. -env_vars: { - key: "RUN_SYSTEM_TESTS" - value: "false" -} - -env_vars: { - key: "NOX_SESSION" - value: "blacken doctests format" -} diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg deleted file mode 100644 index 78cdc5e85109..000000000000 --- a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.12.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "system-3.12" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg b/packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg deleted file mode 100644 index 86e7c5d7762c..000000000000 --- a/packages/google-cloud-datastore/.kokoro/presubmit/system-3.14.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "system-3.14" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg deleted file mode 100644 index 80001a39bf25..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/lint/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "lint" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/lint/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg deleted file mode 100644 index 50fec9649732..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/lint/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/lint/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg deleted file mode 100644 index 237cfb14398b..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.10/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.10" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-310" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.10/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.10/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.10/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg deleted file mode 100644 index e4b302a64a47..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.11/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.11" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-311" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.11/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.11/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.11/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg deleted file mode 100644 index 204de15ee400..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.12/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.12" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-312" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.12/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.12/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.12/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg deleted file mode 100644 index 33af919b04ab..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.13/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.13" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-313" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.13/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.13/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.13/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg deleted file mode 100644 index dcfec354330b..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.14/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.14" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-314" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.14/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.14/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.14/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg deleted file mode 100644 index a46730a6bbd1..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.7" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py37" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.7/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg deleted file mode 100644 index d13fb561e938..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.8" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py38" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.8/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg deleted file mode 100644 index 4fcda9f0720c..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.9" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py39" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-datastore/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg deleted file mode 100644 index 714045a75ed7..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-datastore/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-datastore/.kokoro/samples/python3.9/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh b/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh deleted file mode 100755 index e9d8bd79a644..000000000000 --- a/packages/google-cloud-datastore/.kokoro/test-samples-against-head.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A customized test runner for samples. -# -# For periodic builds, you can specify this file for testing against head. 
- -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost command that exits with a non-zero status -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh b/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh deleted file mode 100755 index 53e365bc4e79..000000000000 --- a/packages/google-cloud-datastore/.kokoro/test-samples-impl.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost command that exits with a non-zero status -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Exit early if samples don't exist -if ! find samples -name 'requirements.txt' | grep -q .; then - echo "No tests run. './samples/**/requirements.txt' not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -# `virtualenv==20.26.6` is added for Python 3.7 compatibility -python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 - -# Use secrets accessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.9 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - fi - - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" diff --git a/packages/google-cloud-datastore/.kokoro/test-samples.sh b/packages/google-cloud-datastore/.kokoro/test-samples.sh deleted file mode 100755 index 7933d820149a..000000000000 --- a/packages/google-cloud-datastore/.kokoro/test-samples.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# The default test runner for samples. -# -# For periodic builds, we rewind the repo to the latest release and -# run test-samples-impl.sh. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost command that exits with a non-zero status -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Run periodic samples tests at latest release -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - # preserving the test runner implementation. - cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - echo "Now we rewind the repo back to the latest release..." - LATEST_RELEASE=$(git describe --abbrev=0 --tags) - git checkout $LATEST_RELEASE - echo "The current head is: " - echo $(git rev-parse --verify HEAD) - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - # Move the test runner implementation back if the file is missing. - if [ ! -f .kokoro/test-samples-impl.sh ]; then - cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh - fi -fi - -exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-datastore/.kokoro/trampoline.sh b/packages/google-cloud-datastore/.kokoro/trampoline.sh deleted file mode 100755 index 48f79699706e..000000000000 --- a/packages/google-cloud-datastore/.kokoro/trampoline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -set -eo pipefail - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh b/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh deleted file mode 100755 index 35fa529231dc..000000000000 --- a/packages/google-cloud-datastore/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. -# -# To run this script, first download a few files from gcs to /dev/shm. -# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). -# -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm -# -# Then run the script. -# .kokoro/trampoline_v2.sh -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after a -# successful build. -# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. -# Defaults to /workspace. -# Potentially there are some repo specific envvars in .trampolinerc in -# the project root. - - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.5" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100.
-function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. -function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. -function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. -TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passed down to the -# container to tell which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables.
- "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" - "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." - exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. 
"github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." - if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. - if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. 
- "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. - "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. - "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. -if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." - fi - # Call trampoline_after_upload_hook if it's defined. - if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" diff --git a/packages/google-cloud-datastore/.trampolinerc b/packages/google-cloud-datastore/.trampolinerc deleted file mode 100644 index 0080152373d5..000000000000 --- a/packages/google-cloud-datastore/.trampolinerc +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add required env vars here. 
-required_envvars+=(
-)
-
-# Add env vars which are passed down into the container here.
-pass_down_envvars+=(
-    "NOX_SESSION"
-    ###############
-    # Docs builds
-    ###############
-    "STAGING_BUCKET"
-    "V2_STAGING_BUCKET"
-    ##################
-    # Samples builds
-    ##################
-    "INSTALL_LIBRARY_FROM_SOURCE"
-    # The nox session to run.
-    "RUN_TESTS_SESSION"
-    "BUILD_SPECIFIC_GCLOUD_PROJECT"
-    # Target directories.
-    "RUN_TESTS_DIRS"
-)
-
-# Prevent unintentional override of the default image.
-if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
-       [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
-    echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
-    exit 1
-fi
-
-# Define default values where it makes sense.
-if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
-    TRAMPOLINE_IMAGE_UPLOAD=""
-fi
-
-if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
-    TRAMPOLINE_IMAGE=""
-fi
-
-if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
-    TRAMPOLINE_DOCKERFILE=""
-fi
-
-if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
-    TRAMPOLINE_BUILD_FILE=""
-fi
diff --git a/packages/google-cloud-datastore/docs/changelog.md b/packages/google-cloud-datastore/docs/changelog.md
deleted file mode 120000
index 04c99a55caae..000000000000
--- a/packages/google-cloud-datastore/docs/changelog.md
+++ /dev/null
@@ -1 +0,0 @@
-../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-datastore/owlbot.py b/packages/google-cloud-datastore/owlbot.py
deleted file mode 100644
index fe1e88c4bcdd..000000000000
--- a/packages/google-cloud-datastore/owlbot.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This script is used to synthesize generated parts of this library."""
-from pathlib import Path
-from typing import List, Optional
-
-import synthtool as s
-from synthtool import gcp
-from synthtool.languages import python
-
-common = gcp.CommonTemplates()
-
-# This library ships clients for two different APIs:
-# Datastore and Datastore Admin.
-datastore_default_version = "v1"
-datastore_admin_default_version = "v1"
-
-for library in s.get_staging_dirs(datastore_default_version):
-    s.move(library / f"google/cloud/datastore_{library.name}")
-    s.move(library / "tests/")
-    s.move(library / "scripts")
-
-for library in s.get_staging_dirs(datastore_admin_default_version):
-    s.move(library / "google/cloud/datastore_admin")
-    s.move(library / f"google/cloud/datastore_admin_{library.name}")
-    s.move(library / "tests")
-    s.move(library / "scripts")
-
-s.remove_staging_dirs()
-
-# ----------------------------------------------------------------------------
-# Add templated files
-# ----------------------------------------------------------------------------
-templated_files = common.py_library(
-    microgenerator=True,
-    split_system_tests=True,
-    # six is required by (but not installed by) google-cloud-core < v2.0.0.
-    unit_test_external_dependencies=["six"],
-    system_test_external_dependencies=["six"],
-    cov_level=100,
-    unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"],
-    default_python_version="3.14",
-    system_test_python_versions=["3.14"],
-)
-s.move(
-    templated_files,
-    excludes=["docs/multiprocessing.rst", ".coveragerc", ".github/**", ".kokoro/**"],
-)
-
-python.py_samples(skip_readmes=True)
-
-s.shell.run(["nox", "-s", "blacken"], hide_output=False)
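Note on the env-var pass-down removed above: trampoline_v2.sh forwards each name
listed in pass_down_envvars into the container via --env, but only when that
variable is set on the host, using bash indirect expansion ("${!e}"). A minimal
standalone sketch of that pattern, not part of the deleted files; the two
variable names are purely illustrative:

#!/usr/bin/env bash
# Sketch of the pass-down pattern from trampoline_v2.sh: forward each
# listed variable via --env only when it is set in the host environment.
set -eu

pass_down_envvars=("NOX_SESSION" "RUN_TESTS_SESSION")  # illustrative names
docker_flags=()

for e in "${pass_down_envvars[@]}"; do
    # "${!e}" is indirect expansion: the value of the variable named by $e.
    if [[ -n "${!e:-}" ]]; then
        docker_flags+=("--env" "${e}=${!e}")
    fi
done

# e.g. NOX_SESSION=unit ./sketch.sh prints: --env NOX_SESSION=unit
if [[ ${#docker_flags[@]} -gt 0 ]]; then
    printf '%s ' "${docker_flags[@]}"
    echo
fi

Because only host-set variables are forwarded, unset names contribute no flags
and secrets never need to be baked into the image.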